Oct 13 13:07:24 crc systemd[1]: Starting Kubernetes Kubelet...
Oct 13 13:07:24 crc restorecon[4657]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 13 13:07:24 crc restorecon[4657]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 13:07:24 crc 
restorecon[4657]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 13 13:07:24 crc 
restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc 
restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc 
restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 13 13:07:24 
crc restorecon[4657]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 13 
13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 
13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 13:07:24 crc 
restorecon[4657]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 
13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:24 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 13:07:25 crc restorecon[4657]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 13:07:25 crc restorecon[4657]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Oct 13 13:07:26 crc kubenswrapper[4684]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 13 13:07:26 crc kubenswrapper[4684]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Oct 13 13:07:26 crc kubenswrapper[4684]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 13 13:07:26 crc kubenswrapper[4684]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
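The long run of "not reset as customized by admin" messages above is expected restorecon behavior rather than an error: container-selinux registers container_file_t as a customizable SELinux type, so a default relabel leaves those files alone unless forced, and the c7,c13-style suffixes are the per-pod MCS category pairs assigned by the container runtime. A minimal shell sketch for inspecting and, if needed, forcing the relabel; the targeted-policy path is an assumption about this host:

    # List the SELinux types restorecon treats as customizable (targeted policy assumed)
    cat /etc/selinux/targeted/contexts/customizable_types
    # Dry run: report what would change under /var/lib/kubelet without relabeling (-n)
    restorecon -Rnv /var/lib/kubelet
    # Force a reset even for customizable types such as container_file_t
    restorecon -RFv /var/lib/kubelet
    # Show local file-context customizations, which also survive a plain restorecon
    semanage fcontext -l -C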
Oct 13 13:07:26 crc kubenswrapper[4684]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Oct 13 13:07:26 crc kubenswrapper[4684]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.074026 4684 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079002 4684 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079041 4684 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079048 4684 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079053 4684 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079059 4684 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079064 4684 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079068 4684 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079073 4684 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079079 4684 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079087 4684 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079095 4684 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079101 4684 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079107 4684 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079112 4684 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079117 4684 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079121 4684 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079126 4684 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079136 4684 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079141 4684 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079145 4684 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079149 4684 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079153 4684 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079159 4684 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079166 4684 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079172 4684 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079176 4684 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079181 4684 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079185 4684 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079189 4684 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079195 4684 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079200 4684 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079205 4684 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079210 4684 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079215 4684 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079219 4684 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079224 4684 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079229 4684 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079233 4684 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079238 4684 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079243 4684 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079247 4684 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079254 4684 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079260 4684 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079265 4684 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079271 4684 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079276 4684 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079281 4684 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079286 4684 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079291 4684 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079295 4684 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079300 4684 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079305 4684 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079309 4684 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079313 4684 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079318 4684 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079322 4684 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079327 4684 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079331 4684 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079337 4684 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079342 4684 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079346 4684 feature_gate.go:330] unrecognized feature gate: Example
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079350 4684 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079355 4684 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079359 4684 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079364 4684 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079369 4684 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079373 4684 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079379 4684 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079383 4684 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079387 4684 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.079392 4684 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079522 4684 flags.go:64] FLAG: --address="0.0.0.0"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079534 4684 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079546 4684 flags.go:64] FLAG: --anonymous-auth="true"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079554 4684 flags.go:64] FLAG: --application-metrics-count-limit="100"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079563 4684 flags.go:64] FLAG: --authentication-token-webhook="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079569 4684 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079577 4684 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079585 4684 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079591 4684 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079596 4684 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079602 4684 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079608 4684 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079614 4684 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079618 4684 flags.go:64] FLAG: --cgroup-root=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079624 4684 flags.go:64] FLAG: --cgroups-per-qos="true"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079629 4684 flags.go:64] FLAG: --client-ca-file=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079635 4684 flags.go:64] FLAG: --cloud-config=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079640 4684 flags.go:64] FLAG: --cloud-provider=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079646 4684 flags.go:64] FLAG: --cluster-dns="[]"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079657 4684 flags.go:64] FLAG: --cluster-domain=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079662 4684 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079667 4684 flags.go:64] FLAG: --config-dir=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079673 4684 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079679 4684 flags.go:64] FLAG: --container-log-max-files="5"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079686 4684 flags.go:64] FLAG: --container-log-max-size="10Mi"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079692 4684 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079697 4684 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079704 4684 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079710 4684 flags.go:64] FLAG: --contention-profiling="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079715 4684 flags.go:64] FLAG: --cpu-cfs-quota="true"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079721 4684 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079727 4684 flags.go:64] FLAG: --cpu-manager-policy="none"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079733 4684 flags.go:64] FLAG: --cpu-manager-policy-options=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079741 4684 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079747 4684 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079753 4684 flags.go:64] FLAG: --enable-debugging-handlers="true"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079760 4684 flags.go:64] FLAG: --enable-load-reader="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079766 4684 flags.go:64] FLAG: --enable-server="true"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079771 4684 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079779 4684 flags.go:64] FLAG: --event-burst="100"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079784 4684 flags.go:64] FLAG: --event-qps="50"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079790 4684 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079796 4684 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079802 4684 flags.go:64] FLAG: --eviction-hard=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079810 4684 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079816 4684 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079822 4684 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079828 4684 flags.go:64] FLAG: --eviction-soft=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079834 4684 flags.go:64] FLAG: --eviction-soft-grace-period=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079839 4684 flags.go:64] FLAG: --exit-on-lock-contention="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079845 4684 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079850 4684 flags.go:64] FLAG: --experimental-mounter-path=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079856 4684 flags.go:64] FLAG: --fail-cgroupv1="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079861 4684 flags.go:64] FLAG: --fail-swap-on="true"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079867 4684 flags.go:64] FLAG: --feature-gates=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079873 4684 flags.go:64] FLAG: --file-check-frequency="20s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079879 4684 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079885 4684 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079891 4684 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079896 4684 flags.go:64] FLAG: --healthz-port="10248"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079921 4684 flags.go:64] FLAG: --help="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079926 4684 flags.go:64] FLAG: --hostname-override=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079932 4684 flags.go:64] FLAG: --housekeeping-interval="10s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079938 4684 flags.go:64] FLAG: --http-check-frequency="20s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079943 4684 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079948 4684 flags.go:64] FLAG: --image-credential-provider-config=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079955 4684 flags.go:64] FLAG: --image-gc-high-threshold="85"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079960 4684 flags.go:64] FLAG: --image-gc-low-threshold="80"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079968 4684 flags.go:64] FLAG: --image-service-endpoint=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079973 4684 flags.go:64] FLAG: --kernel-memcg-notification="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079979 4684 flags.go:64] FLAG: --kube-api-burst="100"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079985 4684 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079991 4684 flags.go:64] FLAG: --kube-api-qps="50"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.079997 4684 flags.go:64] FLAG: --kube-reserved=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080002 4684 flags.go:64] FLAG: --kube-reserved-cgroup=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080007 4684 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080013 4684 flags.go:64] FLAG: --kubelet-cgroups=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080019 4684 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080024 4684 flags.go:64] FLAG: --lock-file=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080030 4684 flags.go:64] FLAG: --log-cadvisor-usage="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080036 4684 flags.go:64] FLAG: --log-flush-frequency="5s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080042 4684 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080051 4684 flags.go:64] FLAG: --log-json-split-stream="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080057 4684 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080062 4684 flags.go:64] FLAG: --log-text-split-stream="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080068 4684 flags.go:64] FLAG: --logging-format="text"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080073 4684 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080080 4684 flags.go:64] FLAG: --make-iptables-util-chains="true"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080086 4684 flags.go:64] FLAG: --manifest-url=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080091 4684 flags.go:64] FLAG: --manifest-url-header=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080099 4684 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080105 4684 flags.go:64] FLAG: --max-open-files="1000000"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080112 4684 flags.go:64] FLAG: --max-pods="110"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080119 4684 flags.go:64] FLAG: --maximum-dead-containers="-1"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080124 4684 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080130 4684 flags.go:64] FLAG: --memory-manager-policy="None"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080136 4684 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080143 4684 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080150 4684 flags.go:64] FLAG: --node-ip="192.168.126.11"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080155 4684 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080174 4684 flags.go:64] FLAG: --node-status-max-images="50"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080181 4684 flags.go:64] FLAG: --node-status-update-frequency="10s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080188 4684 flags.go:64] FLAG: --oom-score-adj="-999"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080194 4684 flags.go:64] FLAG: --pod-cidr=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080208 4684 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080217 4684 flags.go:64] FLAG: --pod-manifest-path=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080223 4684 flags.go:64] FLAG: --pod-max-pids="-1"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080228 4684 flags.go:64] FLAG: --pods-per-core="0"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080234 4684 flags.go:64] FLAG: --port="10250"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080239 4684 flags.go:64] FLAG: --protect-kernel-defaults="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080245 4684 flags.go:64] FLAG: --provider-id=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080250 4684 flags.go:64] FLAG: --qos-reserved=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080255 4684 flags.go:64] FLAG: --read-only-port="10255"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080261 4684 flags.go:64] FLAG: --register-node="true"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080267 4684 flags.go:64] FLAG: --register-schedulable="true"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080272 4684 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080289 4684 flags.go:64] FLAG: --registry-burst="10"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080295 4684 flags.go:64] FLAG: --registry-qps="5"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080300 4684 flags.go:64] FLAG: --reserved-cpus=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080306 4684 flags.go:64] FLAG: --reserved-memory=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080314 4684 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080319 4684 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080325 4684 flags.go:64] FLAG: --rotate-certificates="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080330 4684 flags.go:64] FLAG: --rotate-server-certificates="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080336 4684 flags.go:64] FLAG: --runonce="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080342 4684 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080347 4684 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080354 4684 flags.go:64] FLAG: --seccomp-default="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080359 4684 flags.go:64] FLAG: --serialize-image-pulls="true"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080367 4684 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080373 4684 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080379 4684 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080385 4684 flags.go:64] FLAG: --storage-driver-password="root"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080391 4684 flags.go:64] FLAG: --storage-driver-secure="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080397 4684 flags.go:64] FLAG: --storage-driver-table="stats"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080402 4684 flags.go:64] FLAG: --storage-driver-user="root"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080408 4684 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080414 4684 flags.go:64] FLAG: --sync-frequency="1m0s"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080419 4684 flags.go:64] FLAG: --system-cgroups=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080425 4684 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080435 4684 flags.go:64] FLAG: --system-reserved-cgroup=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080441 4684 flags.go:64] FLAG: --tls-cert-file=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080447 4684 flags.go:64] FLAG: --tls-cipher-suites="[]"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080454 4684 flags.go:64] FLAG: --tls-min-version=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080460 4684 flags.go:64] FLAG: --tls-private-key-file=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080465 4684 flags.go:64] FLAG: --topology-manager-policy="none"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080470 4684 flags.go:64] FLAG: --topology-manager-policy-options=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080475 4684 flags.go:64] FLAG: --topology-manager-scope="container"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080481 4684 flags.go:64] FLAG: --v="2"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080489 4684 flags.go:64] FLAG: --version="false"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080500 4684 flags.go:64] FLAG: --vmodule=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080507 4684 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.080513 4684 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080675 4684 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080688 4684 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080694 4684 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080700 4684 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080705 4684 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080710 4684 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080714 4684 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080718 4684 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080722 4684 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080726 4684 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080730 4684 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080733 4684 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080738 4684 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080741 4684 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080745 4684 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080749 4684 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080753 4684 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080759 4684 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080766 4684 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080774 4684 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080779 4684 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080783 4684 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080788 4684 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080795 4684 feature_gate.go:330] unrecognized feature gate: Example
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080800 4684 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080804 4684 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080811 4684 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080817 4684 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080823 4684 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080828 4684 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080833 4684 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080838 4684 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080844 4684 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080850 4684 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080856 4684 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080861 4684 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080866 4684 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080873 4684 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080878 4684 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080884 4684 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080889 4684 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080894 4684 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080917 4684 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080922 4684 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080928 4684 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080933 4684 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080938 4684 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080943 4684 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080948 4684 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080953 4684 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080957 4684 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080962 4684 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080966 4684 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080971 4684 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080976 4684 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080981 4684 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080985 4684 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080990 4684 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.080994 4684 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.081002 4684 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.081007 4684 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.081017 4684 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.081021 4684 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.081026 4684 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.081030 4684 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.081034 4684 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.081039 4684 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.081043 4684 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.081048 4684 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.081052 4684 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.081056 4684 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.082022 4684 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.094853 4684 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.094896 4684 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095003 4684 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095011 4684 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095015 4684 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095020 4684 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095024 4684 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095027 4684 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095032 4684 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095035 4684 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095039 4684 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095043 4684 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095047 4684 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095051 4684 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095054 4684 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095058 4684 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095061 4684 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095065 4684 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095068 4684 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095072 4684 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095076 4684 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095079 4684 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095084 4684 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095088 4684 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095094 4684 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095102 4684 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095107 4684 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095112 4684 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095117 4684 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095121 4684 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095125 4684 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095130 4684 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095134 4684 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095138 4684 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095143 4684 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095147 4684 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095154 4684 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095159 4684 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095164 4684 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095169 4684 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095173 4684 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095177 4684 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095181 4684 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095185 4684 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095189 4684 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095193 4684 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095197 4684 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095202 4684 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095206 4684 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095211 4684 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095215 4684 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095219 4684 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095224 4684 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095230 4684 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095234 4684 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095238 4684 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095242 4684 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095245 4684 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095249 4684 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095253 4684 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095257 4684 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095261 4684 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095266 4684 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095270 4684 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095273 4684 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095277 4684 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095281 4684 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095284 4684 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095288 4684 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095292 4684 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095299 4684 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095304 4684 feature_gate.go:330] unrecognized feature gate: Example
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095308 4684 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.095315 4684 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095460 4684 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095469 4684 feature_gate.go:330] unrecognized feature gate: Example
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095474 4684 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095478 4684 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095483 4684 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095487 4684 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095491 4684 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095495 4684 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095500 4684 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095503 4684 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095507 4684 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095511 4684 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095515 4684 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095520 4684 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095524 4684 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095527 4684 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095531 4684 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095534 4684 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095537 4684 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095541 4684 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095544 4684 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095547 4684 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095551 4684 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095554 4684 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095558 4684 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095561 4684 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095566 4684 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095570 4684 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095574 4684 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095578 4684 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095582 4684 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095586 4684 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095590 4684 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095595 4684 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095601 4684 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095605 4684 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095609 4684 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095614 4684 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095618 4684 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095622 4684 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095626 4684 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095630 4684 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095634 4684 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095638 4684 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095643 4684 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095648 4684 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095652 4684 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095656 4684 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095660 4684 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095664 4684 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095669 4684 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095673 4684 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095677 4684 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095681 4684 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095685 4684 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095689 4684 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095693 4684 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095697 4684 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095701 4684 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095705 4684 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095709 4684 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095714 4684 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095719 4684 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095723 4684 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095728 4684 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095734 4684 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095739 4684 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095743 4684 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095747 4684 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095751 4684 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.095756 4684 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.095762 4684 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.098952 4684 server.go:940] "Client rotation is on, will bootstrap in background"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.102640 4684 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.102737 4684 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.104920 4684 server.go:997] "Starting client certificate rotation"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.104947 4684 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.105797 4684 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-07 04:28:56.312419857 +0000 UTC
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.105957 4684 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 591h21m30.206467542s for next certificate rotation
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.127999 4684 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.129872 4684 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.173095 4684 log.go:25] "Validated CRI v1 runtime API"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.221215 4684 log.go:25] "Validated CRI v1 image API"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.222948 4684 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.228190 4684 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-13-13-03-06-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.228221 4684 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.245992 4684 manager.go:217] Machine: {Timestamp:2025-10-13 13:07:26.241004618 +0000 UTC m=+0.808388708 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:0f296227-953c-4cb6-a3a1-229df6b9f745 BootID:7a36ef7b-b5c3-4d0b-98f0-0d4235840e71 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:66:f6:c0 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:66:f6:c0 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:f1:51:f3 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:e4:56:6b Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:f5:f6:39 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:52:3d:fe Speed:-1 Mtu:1496} {Name:eth10 MacAddress:02:fc:20:48:96:3d Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:be:a4:76:4d:cd:d6 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.246645 4684 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.246893 4684 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.252453 4684 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.252718 4684 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.252757 4684 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.253007 4684 topology_manager.go:138] "Creating topology manager with none policy"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.253020 4684 container_manager_linux.go:303] "Creating device plugin manager"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.253758 4684 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.253787 4684 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.254556 4684 state_mem.go:36] "Initialized new in-memory state store"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.255130 4684 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.259064 4684 kubelet.go:418] "Attempting to sync node with API server"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.259088 4684 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.259113 4684 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.259125 4684 kubelet.go:324] "Adding apiserver pod source"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.259138 4684 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.263628 4684 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.264723 4684 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.265943 4684 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.270986 4684 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.271032 4684 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.271042 4684 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.271050 4684 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.271063 4684 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.271072 4684 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.271081 4684 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.271099 4684 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.271300 4684 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.271403 4684 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.271798 4684 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.271860 4684 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.271812 4684 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.271815 4684 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused
Oct 13 13:07:26 crc kubenswrapper[4684]: E1013 13:07:26.274779 4684 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.274738 4684 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Oct 13 13:07:26 crc kubenswrapper[4684]: E1013 13:07:26.274797 4684 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.275713 4684 server.go:1280] "Started kubelet"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.277138 4684 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.277633 4684 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.277623 4684 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Oct 13 13:07:26 crc systemd[1]: Started Kubernetes Kubelet.
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.278553 4684 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.280539 4684 server.go:460] "Adding debug handlers to kubelet server"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.280567 4684 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.280659 4684 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.280753 4684 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 03:15:54.913686017 +0000 UTC
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.280786 4684 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1550h8m28.632901634s for next certificate rotation
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.280959 4684 volume_manager.go:287] "The desired_state_of_world populator starts"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.280982 4684 volume_manager.go:289] "Starting Kubelet Volume Manager"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.281114 4684 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Oct 13 13:07:26 crc kubenswrapper[4684]: E1013 13:07:26.281485 4684 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused" interval="200ms"
Oct 13 13:07:26 crc kubenswrapper[4684]: E1013 13:07:26.281013 4684 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.282554 4684 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused
Oct 13 13:07:26 crc kubenswrapper[4684]: E1013 13:07:26.282724 4684 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.283248 4684 factory.go:55] Registering systemd factory
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.283291 4684 factory.go:221] Registration of the systemd container factory successfully
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.283749 4684 factory.go:153] Registering CRI-O factory
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.283790 4684 factory.go:221] Registration of the crio container factory successfully
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.284039 4684 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.284102 4684 factory.go:103] Registering Raw factory
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.284137 4684 manager.go:1196] Started watching for new ooms in manager
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.285573 4684 manager.go:319] Starting recovery of all containers
Oct 13 13:07:26 crc kubenswrapper[4684]: E1013 13:07:26.285406 4684 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.212:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186e0edce6c43ca6 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-13 13:07:26.27565687 +0000 UTC m=+0.843040940,LastTimestamp:2025-10-13 13:07:26.27565687 +0000 UTC m=+0.843040940,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.292739 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.293541 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.293587 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.293630 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.293658 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.293704 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.293732 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.293762 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.293809 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.293837 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.293879 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.293938 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.293977 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294013 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294050 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294077 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294109 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294149 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294179 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294216 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294245 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294276 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294316 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294344 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294383 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294412 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294462 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294495 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294533 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294564 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294602 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294636 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294666 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294704 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294735 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294773 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294803 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294828 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294865 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294932 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.294976 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.295005 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.295038 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.295374 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.295400 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296358 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296384 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296399 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296414 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296426 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296438 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296450 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296471 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296486 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296501 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296513 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296565 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296576 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296586 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296596 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296606 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296640 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296650 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296661 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296672 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296684 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296695 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296728 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296737 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296749 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296760 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296769 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296781 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296793 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296823 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296846 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296859 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296871 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296882 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296894 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296930 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296943 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296955 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296971 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296981 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.296991 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300595 4684 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300696 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300711 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300723 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300736 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300748 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300761 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300771 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300784 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300796 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300807 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300818 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300830 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300841 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300852 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300863 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300874 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300888 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300917 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300936 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300951 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300966 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300978 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.300989 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301000 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301011 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301021 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301032 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301044 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301056 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301068 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301079 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301090 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301295 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301311 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301327 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301342 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301355 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301370 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301386 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301398 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301414 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301425 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301443 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301456 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301467 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301478 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301490 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301501 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod=""
podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301514 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301525 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301537 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301549 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301560 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301571 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301584 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301599 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301612 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301627 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301637 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301647 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301658 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301669 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301679 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301689 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301699 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301709 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301719 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301728 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301748 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301759 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" 
volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301768 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301779 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301789 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301802 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301812 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301823 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301832 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301844 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301853 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301863 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301872 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301883 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301893 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301918 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301929 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301939 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301948 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301959 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301974 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301983 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.301994 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302004 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302012 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302022 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302031 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302043 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302054 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302063 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302074 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302084 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302093 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302104 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302113 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302123 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302133 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302372 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302389 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302400 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302410 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302421 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302431 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302441 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302452 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302463 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" 
volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302473 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302483 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302493 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302504 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302514 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302524 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302533 4684 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302543 4684 reconstruct.go:97] "Volume reconstruction finished" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.302550 4684 reconciler.go:26] "Reconciler: start to sync state" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.309326 4684 manager.go:324] Recovery completed Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.320191 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.322211 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.322258 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.322269 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.323249 4684 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.323277 4684 
cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.323362 4684 state_mem.go:36] "Initialized new in-memory state store" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.346304 4684 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.349303 4684 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.349361 4684 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.349396 4684 kubelet.go:2335] "Starting kubelet main sync loop" Oct 13 13:07:26 crc kubenswrapper[4684]: E1013 13:07:26.349449 4684 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.355367 4684 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused Oct 13 13:07:26 crc kubenswrapper[4684]: E1013 13:07:26.355443 4684 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.367781 4684 policy_none.go:49] "None policy: Start" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.369449 4684 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.369486 4684 state_mem.go:35] "Initializing new in-memory state store" Oct 13 13:07:26 crc kubenswrapper[4684]: E1013 13:07:26.381584 4684 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.430621 4684 manager.go:334] "Starting Device Plugin manager" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.430704 4684 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.430725 4684 server.go:79] "Starting device plugin registration server" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.431421 4684 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.431443 4684 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.431710 4684 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.431819 4684 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.431831 4684 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 13 13:07:26 crc kubenswrapper[4684]: E1013 13:07:26.438299 4684 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get 
node info: node \"crc\" not found" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.450537 4684 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.450627 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.451822 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.451897 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.451962 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.452239 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.452415 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.452476 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.453443 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.453470 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.453481 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.453794 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.453815 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.453826 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.453957 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.454170 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.454252 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.454822 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.454853 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.454865 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.455018 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.455145 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.455199 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.455663 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.455721 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.455736 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.456346 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.456390 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.456404 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.456406 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.456439 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.456456 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.456502 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.456712 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.456759 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.457589 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.457646 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.457665 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.457738 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.457770 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.457781 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.457931 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.458007 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.459029 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.459072 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.459085 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:26 crc kubenswrapper[4684]: E1013 13:07:26.482770 4684 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused" interval="400ms" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.503890 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.503973 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.504005 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.504027 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.504048 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.504068 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.504389 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.504459 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.504476 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.504490 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.504547 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.504565 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.504618 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.504637 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.504653 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.532308 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.533601 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.533702 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.533719 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.533754 4684 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 13 13:07:26 crc kubenswrapper[4684]: E1013 13:07:26.534444 4684 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.212:6443: connect: connection refused" node="crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.605771 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606313 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606343 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606373 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606388 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606404 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606430 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606497 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606512 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606444 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606453 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606560 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606593 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: 
\"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606659 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606780 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606814 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606832 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606873 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606880 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606893 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606934 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606962 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606983 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.607003 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.607186 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.607197 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.607255 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.606984 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.607320 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.607377 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.735041 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.736555 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.736608 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.736627 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.736660 4684 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 13 13:07:26 crc kubenswrapper[4684]: E1013 13:07:26.737185 4684 kubelet_node_status.go:99] "Unable to register node with API server" err="Post 
\"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.212:6443: connect: connection refused" node="crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.797439 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.814637 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.837114 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-2df310fc456bf046074b70e57c9bc055022095282ba6efa80e54f0cfbd4588f9 WatchSource:0}: Error finding container 2df310fc456bf046074b70e57c9bc055022095282ba6efa80e54f0cfbd4588f9: Status 404 returned error can't find the container with id 2df310fc456bf046074b70e57c9bc055022095282ba6efa80e54f0cfbd4588f9 Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.842519 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.843594 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-b7c6999fa966cb08fcdbcb90395ad24fa03f268724743915dcec9da2f2870655 WatchSource:0}: Error finding container b7c6999fa966cb08fcdbcb90395ad24fa03f268724743915dcec9da2f2870655: Status 404 returned error can't find the container with id b7c6999fa966cb08fcdbcb90395ad24fa03f268724743915dcec9da2f2870655 Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.860048 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-d61a036cd819c5be2fcf21ffa46db4957cfe31c5b74ca131dee9bbea25e73e1e WatchSource:0}: Error finding container d61a036cd819c5be2fcf21ffa46db4957cfe31c5b74ca131dee9bbea25e73e1e: Status 404 returned error can't find the container with id d61a036cd819c5be2fcf21ffa46db4957cfe31c5b74ca131dee9bbea25e73e1e Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.866769 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: I1013 13:07:26.871734 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.881593 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-4766a34a7f9d1ee3c933cf3882983a9f93d25800cfb99a9586f6f16a169616c3 WatchSource:0}: Error finding container 4766a34a7f9d1ee3c933cf3882983a9f93d25800cfb99a9586f6f16a169616c3: Status 404 returned error can't find the container with id 4766a34a7f9d1ee3c933cf3882983a9f93d25800cfb99a9586f6f16a169616c3 Oct 13 13:07:26 crc kubenswrapper[4684]: W1013 13:07:26.882412 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-689e26f58d17e51674b316f3912cf01fd950ed318266f55b20141a10169060ff WatchSource:0}: Error finding container 689e26f58d17e51674b316f3912cf01fd950ed318266f55b20141a10169060ff: Status 404 returned error can't find the container with id 689e26f58d17e51674b316f3912cf01fd950ed318266f55b20141a10169060ff Oct 13 13:07:26 crc kubenswrapper[4684]: E1013 13:07:26.883457 4684 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused" interval="800ms" Oct 13 13:07:27 crc kubenswrapper[4684]: I1013 13:07:27.138382 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:27 crc kubenswrapper[4684]: I1013 13:07:27.140803 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:27 crc kubenswrapper[4684]: I1013 13:07:27.140887 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:27 crc kubenswrapper[4684]: I1013 13:07:27.140926 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:27 crc kubenswrapper[4684]: I1013 13:07:27.140968 4684 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 13 13:07:27 crc kubenswrapper[4684]: E1013 13:07:27.141595 4684 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.212:6443: connect: connection refused" node="crc" Oct 13 13:07:27 crc kubenswrapper[4684]: I1013 13:07:27.279003 4684 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused Oct 13 13:07:27 crc kubenswrapper[4684]: I1013 13:07:27.353937 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2df310fc456bf046074b70e57c9bc055022095282ba6efa80e54f0cfbd4588f9"} Oct 13 13:07:27 crc kubenswrapper[4684]: I1013 13:07:27.354729 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"b7c6999fa966cb08fcdbcb90395ad24fa03f268724743915dcec9da2f2870655"} Oct 13 13:07:27 crc kubenswrapper[4684]: I1013 
13:07:27.356062 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"689e26f58d17e51674b316f3912cf01fd950ed318266f55b20141a10169060ff"} Oct 13 13:07:27 crc kubenswrapper[4684]: I1013 13:07:27.356849 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4766a34a7f9d1ee3c933cf3882983a9f93d25800cfb99a9586f6f16a169616c3"} Oct 13 13:07:27 crc kubenswrapper[4684]: I1013 13:07:27.360209 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d61a036cd819c5be2fcf21ffa46db4957cfe31c5b74ca131dee9bbea25e73e1e"} Oct 13 13:07:27 crc kubenswrapper[4684]: W1013 13:07:27.421658 4684 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused Oct 13 13:07:27 crc kubenswrapper[4684]: E1013 13:07:27.421763 4684 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError" Oct 13 13:07:27 crc kubenswrapper[4684]: W1013 13:07:27.486427 4684 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused Oct 13 13:07:27 crc kubenswrapper[4684]: E1013 13:07:27.486557 4684 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError" Oct 13 13:07:27 crc kubenswrapper[4684]: W1013 13:07:27.538979 4684 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused Oct 13 13:07:27 crc kubenswrapper[4684]: E1013 13:07:27.539078 4684 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError" Oct 13 13:07:27 crc kubenswrapper[4684]: E1013 13:07:27.684590 4684 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused" interval="1.6s" Oct 13 13:07:27 crc kubenswrapper[4684]: W1013 13:07:27.859741 4684 reflector.go:561] 
k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused Oct 13 13:07:27 crc kubenswrapper[4684]: E1013 13:07:27.859833 4684 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.212:6443: connect: connection refused" logger="UnhandledError" Oct 13 13:07:27 crc kubenswrapper[4684]: I1013 13:07:27.941973 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:27 crc kubenswrapper[4684]: I1013 13:07:27.943349 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:27 crc kubenswrapper[4684]: I1013 13:07:27.943384 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:27 crc kubenswrapper[4684]: I1013 13:07:27.943395 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:27 crc kubenswrapper[4684]: I1013 13:07:27.943447 4684 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 13 13:07:27 crc kubenswrapper[4684]: E1013 13:07:27.943995 4684 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.212:6443: connect: connection refused" node="crc" Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.278706 4684 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.364868 4684 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79" exitCode=0 Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.364971 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79"} Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.365142 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.366130 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.366194 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.366203 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.366716 4684 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338" exitCode=0 
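The node-lease controller's retry interval doubles on each failure: interval="800ms" above, then "1.6s", with "3.2s" appearing a couple of seconds later below. A minimal sketch reproducing that doubling; the 7s cap is an assumption for illustration, not something shown in this log:

from itertools import islice

def lease_retry_intervals(base_ms=800, cap_ms=7000):
    # Doubling backoff matching the interval="..." values recorded in this
    # log (800ms -> 1.6s -> 3.2s); the cap value is an assumption.
    interval_ms = base_ms
    while True:
        yield interval_ms
        interval_ms = min(interval_ms * 2, cap_ms)

print([f"{ms/1000:g}s" for ms in islice(lease_retry_intervals(), 4)])
# ['0.8s', '1.6s', '3.2s', '6.4s']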
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.366785 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338"}
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.366820 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.367690 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.367722 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.367735 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.369145 4684 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="3b2142d8d94e4332daac837f6666cb8d1ebf547154443c6923ed086c9178c21f" exitCode=0
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.369199 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.369537 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"3b2142d8d94e4332daac837f6666cb8d1ebf547154443c6923ed086c9178c21f"}
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.369660 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.370384 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.370413 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.370448 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.370472 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.370491 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.370473 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.371792 4684 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94" exitCode=0
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.371842 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94"}
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.371943 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.373230 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.373268 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.373281 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.375300 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1"}
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.375338 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29"}
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.375353 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0"}
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.375370 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd"}
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.375480 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.377316 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.377354 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:28 crc kubenswrapper[4684]: I1013 13:07:28.377368 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.278302 4684 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.212:6443: connect: connection refused
Oct 13 13:07:29 crc kubenswrapper[4684]: E1013 13:07:29.286370 4684 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.212:6443: connect: connection refused" interval="3.2s"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.386770 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1665fc86f1a0d4d2c6b1cb02d8c96e6c23b3d77c076af39784b0c4af37355055"}
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.386827 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"f056170937edae1ef2996bc07287e473f8a426f39ab52b9d448d7ef471bce70f"}
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.386844 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9d4b7df401f831751f813a17cc0e75694cf4de341ec37ef97818003ffcf6d598"}
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.386894 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.390151 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.390187 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.390199 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.394380 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6"}
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.394791 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401"}
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.396944 4684 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f" exitCode=0
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.397106 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.407010 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7"}
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.407062 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5"}
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.407087 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f"}
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.408962 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.409055 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.409084 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.417641 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.417988 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"012a38cb4bafb7cf4b300f20b1d51b0af45dbcf83ac534f47fd0be4914cc470a"}
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.418006 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.421645 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.421698 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.421665 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.421746 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.421762 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.421720 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.544431 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.545980 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.546036 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.546050 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:29 crc kubenswrapper[4684]: I1013 13:07:29.546097 4684 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 13 13:07:29 crc kubenswrapper[4684]: E1013 13:07:29.546454 4684 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.212:6443: connect: connection refused" node="crc"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.423318 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174"}
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.424024 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.425296 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.425340 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.425349 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.427463 4684 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3" exitCode=0
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.427571 4684 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.427557 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3"}
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.427613 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.427678 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.427873 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.428694 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.428786 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.428853 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.428971 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.429002 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.429016 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.429222 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.429248 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.429259 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.773645 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.773834 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.774920 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.774949 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:30 crc kubenswrapper[4684]: I1013 13:07:30.774959 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:31 crc kubenswrapper[4684]: I1013 13:07:31.436308 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5"}
Oct 13 13:07:31 crc kubenswrapper[4684]: I1013 13:07:31.436389 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0"}
Oct 13 13:07:31 crc kubenswrapper[4684]: I1013 13:07:31.436404 4684 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 13 13:07:31 crc kubenswrapper[4684]: I1013 13:07:31.436489 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:31 crc kubenswrapper[4684]: I1013 13:07:31.436411 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59"}
Oct 13 13:07:31 crc kubenswrapper[4684]: I1013 13:07:31.436571 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7"}
Oct 13 13:07:31 crc kubenswrapper[4684]: I1013 13:07:31.441967 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:31 crc kubenswrapper[4684]: I1013 13:07:31.442041 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:31 crc kubenswrapper[4684]: I1013 13:07:31.442060 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:32 crc kubenswrapper[4684]: I1013 13:07:32.446216 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6a5e250ce84efdc3c0690e1589acbe374224a91d6f94f4849a1ccb381d6758ed"}
Oct 13 13:07:32 crc kubenswrapper[4684]: I1013 13:07:32.446396 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 13:07:32 crc kubenswrapper[4684]: I1013 13:07:32.447757 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
event="NodeHasNoDiskPressure" Oct 13 13:07:32 crc kubenswrapper[4684]: I1013 13:07:32.447803 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:32 crc kubenswrapper[4684]: I1013 13:07:32.747529 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:32 crc kubenswrapper[4684]: I1013 13:07:32.749603 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:32 crc kubenswrapper[4684]: I1013 13:07:32.749647 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:32 crc kubenswrapper[4684]: I1013 13:07:32.749660 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:32 crc kubenswrapper[4684]: I1013 13:07:32.749718 4684 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.172334 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.172483 4684 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.172523 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.173968 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.174008 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.174021 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.340652 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.340958 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.342544 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.342601 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.342621 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.449661 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.451198 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.451260 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.451274 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.675770 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.676095 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.677643 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.677700 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.677720 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.776012 4684 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 13 13:07:33 crc kubenswrapper[4684]: I1013 13:07:33.776139 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 13 13:07:34 crc kubenswrapper[4684]: I1013 13:07:34.226964 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 13:07:34 crc kubenswrapper[4684]: I1013 13:07:34.227191 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:34 crc kubenswrapper[4684]: I1013 13:07:34.228927 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:34 crc kubenswrapper[4684]: I1013 13:07:34.228975 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:34 crc kubenswrapper[4684]: I1013 13:07:34.228992 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:34 crc kubenswrapper[4684]: I1013 13:07:34.738675 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:34 crc kubenswrapper[4684]: I1013 13:07:34.738966 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:34 crc kubenswrapper[4684]: I1013 13:07:34.740656 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:34 crc kubenswrapper[4684]: I1013 13:07:34.740741 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:34 crc kubenswrapper[4684]: I1013 13:07:34.740770 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:34 crc kubenswrapper[4684]: I1013 
13:07:34.864438 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 13 13:07:34 crc kubenswrapper[4684]: I1013 13:07:34.864727 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:34 crc kubenswrapper[4684]: I1013 13:07:34.866271 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:34 crc kubenswrapper[4684]: I1013 13:07:34.866331 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:34 crc kubenswrapper[4684]: I1013 13:07:34.866347 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:35 crc kubenswrapper[4684]: I1013 13:07:35.635339 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 13 13:07:35 crc kubenswrapper[4684]: I1013 13:07:35.635641 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:35 crc kubenswrapper[4684]: I1013 13:07:35.637998 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:35 crc kubenswrapper[4684]: I1013 13:07:35.638055 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:35 crc kubenswrapper[4684]: I1013 13:07:35.638078 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:36 crc kubenswrapper[4684]: E1013 13:07:36.438494 4684 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 13 13:07:37 crc kubenswrapper[4684]: I1013 13:07:37.453615 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 13:07:37 crc kubenswrapper[4684]: I1013 13:07:37.453834 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:37 crc kubenswrapper[4684]: I1013 13:07:37.455273 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:37 crc kubenswrapper[4684]: I1013 13:07:37.455330 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:37 crc kubenswrapper[4684]: I1013 13:07:37.455343 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:37 crc kubenswrapper[4684]: I1013 13:07:37.470474 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 13:07:37 crc kubenswrapper[4684]: I1013 13:07:37.470661 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:37 crc kubenswrapper[4684]: I1013 13:07:37.470888 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 13:07:37 crc kubenswrapper[4684]: I1013 13:07:37.472216 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:37 crc kubenswrapper[4684]: I1013 13:07:37.472282 4684 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:37 crc kubenswrapper[4684]: I1013 13:07:37.472308 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:37 crc kubenswrapper[4684]: I1013 13:07:37.476566 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 13:07:38 crc kubenswrapper[4684]: I1013 13:07:38.464185 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:38 crc kubenswrapper[4684]: I1013 13:07:38.465072 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:38 crc kubenswrapper[4684]: I1013 13:07:38.465110 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:38 crc kubenswrapper[4684]: I1013 13:07:38.465121 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:39 crc kubenswrapper[4684]: I1013 13:07:39.466265 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:39 crc kubenswrapper[4684]: I1013 13:07:39.467103 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:39 crc kubenswrapper[4684]: I1013 13:07:39.467153 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:39 crc kubenswrapper[4684]: I1013 13:07:39.467168 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:39 crc kubenswrapper[4684]: W1013 13:07:39.951758 4684 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 13 13:07:39 crc kubenswrapper[4684]: I1013 13:07:39.951865 4684 trace.go:236] Trace[1549951938]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Oct-2025 13:07:29.950) (total time: 10001ms): Oct 13 13:07:39 crc kubenswrapper[4684]: Trace[1549951938]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (13:07:39.951) Oct 13 13:07:39 crc kubenswrapper[4684]: Trace[1549951938]: [10.001146429s] [10.001146429s] END Oct 13 13:07:39 crc kubenswrapper[4684]: E1013 13:07:39.951917 4684 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 13 13:07:40 crc kubenswrapper[4684]: W1013 13:07:40.185567 4684 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 13 13:07:40 crc kubenswrapper[4684]: I1013 13:07:40.185710 4684 trace.go:236] Trace[791625656]: "Reflector ListAndWatch" 
name:k8s.io/client-go/informers/factory.go:160 (13-Oct-2025 13:07:30.184) (total time: 10001ms): Oct 13 13:07:40 crc kubenswrapper[4684]: Trace[791625656]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (13:07:40.185) Oct 13 13:07:40 crc kubenswrapper[4684]: Trace[791625656]: [10.001575697s] [10.001575697s] END Oct 13 13:07:40 crc kubenswrapper[4684]: E1013 13:07:40.185754 4684 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 13 13:07:40 crc kubenswrapper[4684]: E1013 13:07:40.256622 4684 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": net/http: TLS handshake timeout" event="&Event{ObjectMeta:{crc.186e0edce6c43ca6 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-13 13:07:26.27565687 +0000 UTC m=+0.843040940,LastTimestamp:2025-10-13 13:07:26.27565687 +0000 UTC m=+0.843040940,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 13 13:07:40 crc kubenswrapper[4684]: I1013 13:07:40.278266 4684 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Oct 13 13:07:40 crc kubenswrapper[4684]: W1013 13:07:40.545748 4684 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 13 13:07:40 crc kubenswrapper[4684]: I1013 13:07:40.545983 4684 trace.go:236] Trace[973403280]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Oct-2025 13:07:30.544) (total time: 10001ms): Oct 13 13:07:40 crc kubenswrapper[4684]: Trace[973403280]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (13:07:40.545) Oct 13 13:07:40 crc kubenswrapper[4684]: Trace[973403280]: [10.001364912s] [10.001364912s] END Oct 13 13:07:40 crc kubenswrapper[4684]: E1013 13:07:40.546036 4684 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 13 13:07:40 crc kubenswrapper[4684]: I1013 13:07:40.765044 4684 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User 
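The error text in these records tracks the API server's bring-up: "connect: connection refused" while nothing is listening on 6443, then "net/http: TLS handshake timeout" once the socket is open but the server is still initializing, and finally HTTP 403 from /livez below, where the server is serving but rejects the kubelet's anonymous startup probe. A rough classifier over such records; the function and phase labels are hypothetical, not from the kubelet:

from collections import Counter

def bringup_phase(line):
    # Rough phase label inferred from the error text of a single record.
    if "connect: connection refused" in line:
        return "apiserver not listening"
    if "TLS handshake timeout" in line:
        return "apiserver listening, not serving yet"
    if "statuscode: 403" in line or '"code":403' in line:
        return "apiserver serving, request rejected"
    return None

def phase_counts(lines):
    return Counter(p for p in map(bringup_phase, lines) if p)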
\"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 13 13:07:40 crc kubenswrapper[4684]: I1013 13:07:40.765163 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 13 13:07:40 crc kubenswrapper[4684]: I1013 13:07:40.772738 4684 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 13 13:07:40 crc kubenswrapper[4684]: I1013 13:07:40.772841 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 13 13:07:43 crc kubenswrapper[4684]: I1013 13:07:43.177153 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:43 crc kubenswrapper[4684]: I1013 13:07:43.177323 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:43 crc kubenswrapper[4684]: I1013 13:07:43.178376 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:43 crc kubenswrapper[4684]: I1013 13:07:43.178405 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:43 crc kubenswrapper[4684]: I1013 13:07:43.178415 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:43 crc kubenswrapper[4684]: I1013 13:07:43.182175 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:43 crc kubenswrapper[4684]: I1013 13:07:43.477624 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:43 crc kubenswrapper[4684]: I1013 13:07:43.478948 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:43 crc kubenswrapper[4684]: I1013 13:07:43.479020 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:43 crc kubenswrapper[4684]: I1013 13:07:43.479036 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:43 crc kubenswrapper[4684]: I1013 13:07:43.774189 4684 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 13 13:07:43 crc kubenswrapper[4684]: I1013 13:07:43.774277 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 13 13:07:44 crc kubenswrapper[4684]: I1013 13:07:44.237357 4684 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 13 13:07:44 crc kubenswrapper[4684]: I1013 13:07:44.890370 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 13 13:07:44 crc kubenswrapper[4684]: I1013 13:07:44.890603 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:44 crc kubenswrapper[4684]: I1013 13:07:44.892063 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:44 crc kubenswrapper[4684]: I1013 13:07:44.892120 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:44 crc kubenswrapper[4684]: I1013 13:07:44.892132 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:44 crc kubenswrapper[4684]: I1013 13:07:44.902480 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 13 13:07:44 crc kubenswrapper[4684]: I1013 13:07:44.937187 4684 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.136234 4684 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.271309 4684 apiserver.go:52] "Watching apiserver" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.277119 4684 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.277571 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.278015 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.278322 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.278401 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.279065 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.279280 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.279412 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.279068 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.279289 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.279520 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.281926 4684 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.282169 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.282190 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.282277 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.282785 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.283099 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.283127 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.283132 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.283169 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.283244 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.318520 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.334135 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.346754 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.359533 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.375439 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.386542 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.400986 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.502695 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.756037 4684 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.759640 4684 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.760272 4684 trace.go:236] Trace[1773815893]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Oct-2025 13:07:30.854) (total time: 14905ms): Oct 13 13:07:45 crc kubenswrapper[4684]: Trace[1773815893]: ---"Objects listed" error: 14905ms (13:07:45.760) Oct 13 13:07:45 crc kubenswrapper[4684]: Trace[1773815893]: [14.905604378s] [14.905604378s] END Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.760311 4684 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.764762 4684 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.808801 4684 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:37442->192.168.126.11:17697: read: connection reset by peer" start-of-body= Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.808893 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:37442->192.168.126.11:17697: read: connection reset by peer" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.808801 4684 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:37426->192.168.126.11:17697: read: connection reset by peer" start-of-body= Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.809007 4684 prober.go:107] "Probe 
failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:37426->192.168.126.11:17697: read: connection reset by peer" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.809333 4684 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.809360 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.809586 4684 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.809624 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861485 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861555 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861582 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861611 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861637 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861661 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861689 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861713 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861740 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861765 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861787 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861809 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861834 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861856 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861882 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod 
\"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861934 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861966 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.861996 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862022 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862057 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862093 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862126 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862155 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862182 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862216 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862243 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862268 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862293 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862330 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862358 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862390 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862424 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862456 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862483 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862534 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" 
(UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862563 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862591 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862617 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862648 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862677 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862701 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862734 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862761 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862791 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862818 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862844 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862888 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862940 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862970 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.862999 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863026 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863053 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863076 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863102 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 
13:07:45.863135 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863154 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863174 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863204 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863230 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863254 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863304 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863330 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863355 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863381 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863407 4684 
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863432 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863456 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863479 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863508 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863530 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863559 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863585 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863611 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863634 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863657 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863684 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863706 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863729 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863754 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863777 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863798 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863821 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863843 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863865 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863889 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863939 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863970 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.863995 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864018 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864223 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864245 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864270 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864294 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864321 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864344 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864368 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864391 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864416 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864444 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864472 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864497 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864522 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864552 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864580 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct
13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864605 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864632 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864657 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864681 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864705 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864728 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864752 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864776 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864800 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864824 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 13 
13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864848 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864877 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864923 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864950 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.864978 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865002 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865027 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865055 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865080 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865107 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: 
\"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865133 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865158 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865183 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865207 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865230 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865257 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865282 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865308 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865333 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865360 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865409 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865434 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865474 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865499 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865523 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865546 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865569 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865591 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865614 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865640 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" 
(UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865665 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865689 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865714 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865739 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865764 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865792 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865822 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865848 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865873 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865897 4684 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865956 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.865984 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866009 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866151 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866174 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866192 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866210 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866228 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866246 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866266 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866286 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866308 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866329 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866348 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866370 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866389 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866410 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866428 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866448 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866468 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866486 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866506 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866525 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866544 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866562 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866580 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866601 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866619 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866639 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866659 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866678 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866697 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866715 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866733 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866751 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866771 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866790 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866820 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866844 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866871 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866895 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.866977 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.867044 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.867073 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.867102 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.867135 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.867165 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.867212 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.867243 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.867264 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.867301 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.867330 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.867352 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.867375 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.867400 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.868766 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod 
"e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.868863 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.869121 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.869307 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.869519 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.869706 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.869885 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.870039 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:07:46.370018446 +0000 UTC m=+20.937402516 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.870178 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.870219 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.870396 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.870744 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.870825 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.871033 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.871228 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.871308 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.871583 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.871681 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.871832 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.871971 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.871951 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.871981 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.872080 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.872300 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.872364 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.872378 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.872412 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.872439 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.872793 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.872830 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.872829 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.872870 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.872980 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.873153 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.873304 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.873536 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.873569 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.873870 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). 
InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.873875 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.874196 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.874242 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.874752 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.874768 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.874774 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.874945 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.875073 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.875304 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.875442 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.875505 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.875975 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.876311 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.876530 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.876577 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.876968 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.877306 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.877595 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.877839 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.878495 4684 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.878581 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 13:07:46.378556409 +0000 UTC m=+20.945940659 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.878641 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.878745 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.878871 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.879066 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.879333 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.879437 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.879479 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.879939 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.879945 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.879951 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.880037 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.880158 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.880251 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.880267 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.880296 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.880398 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.880504 4684 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.882845 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.883193 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.883582 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.884003 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.884286 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.884617 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.884732 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.884937 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.885015 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.885050 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.885272 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.885463 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.885630 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.885634 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.885682 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). 
InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.886390 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.886594 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.886681 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.886774 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.887201 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.887478 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.887516 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.888068 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.888143 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.888299 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.888307 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.888719 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.888726 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.889287 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.889652 4684 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.889719 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.889770 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 13:07:46.389747114 +0000 UTC m=+20.957131204 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.890060 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.890083 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.890240 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.890382 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.890453 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.890608 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.890698 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.890746 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.891240 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.891788 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.895044 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.895152 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.895371 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.896159 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.896574 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.895391 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.896608 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.896643 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.896715 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.897118 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.897175 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.897272 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.897355 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.897542 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.897748 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.897807 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.897825 4684 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.897980 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.897985 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-13 13:07:46.397966727 +0000 UTC m=+20.965350797 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.898162 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.898263 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.898671 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.899338 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.899381 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.899664 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.899660 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.900568 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.901069 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.901179 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.901593 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.901603 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.902297 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.902752 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.902806 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.902836 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.903084 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.903436 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.903576 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.903739 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.904199 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.907284 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.907838 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.907932 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.908532 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.917549 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.918103 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.918211 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.918235 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.918411 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.918859 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.919072 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.919079 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.919343 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.919614 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.919938 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.920969 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.921543 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.921883 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.922194 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.922236 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.922328 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.922467 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.922974 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.923041 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.923624 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.923839 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.923926 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.924210 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.924380 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.924713 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.924738 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.924752 4684 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:45 crc kubenswrapper[4684]: E1013 13:07:45.924810 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-13 13:07:46.424790264 +0000 UTC m=+20.992174334 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.925629 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.925951 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.926044 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.926433 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.926756 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.927261 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.927329 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.927966 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.928200 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.908522 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.929512 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.931410 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.933334 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.933579 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.933667 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.933872 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.936571 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.936665 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.948402 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.949291 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.955033 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.956273 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.958438 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.967011 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-p9ngm"] Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.967530 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-p9ngm" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969131 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969181 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969264 4684 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969287 4684 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969303 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969316 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969328 4684 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969342 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969373 4684 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969386 4684 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969399 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969411 4684 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969425 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969438 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969452 4684 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969465 4684 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969478 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969490 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969506 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969518 4684 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969532 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969545 4684 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969558 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969571 4684 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969585 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969600 4684 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969613 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969627 4684 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969640 4684 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969652 4684 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969664 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969677 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969688 4684 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969701 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969713 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969727 4684 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969738 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969750 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969763 4684 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969776 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969789 4684 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969801 4684 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969813 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969825 4684 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969844 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969856 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969870 4684 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969882 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.970227 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.970328 4684 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.970458 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.977494 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.969894 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978058 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978079 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978104 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978119 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978133 4684 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978147 4684 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978160 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978173 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978186 4684 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978197 4684 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978208 4684 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978220 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978232 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978249 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978261 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978272 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978286 4684 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978297 4684 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978309 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978320 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978355 4684 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978369 4684 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978383 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978397 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978420 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978455 4684 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978467 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978480 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978494 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978506 4684 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978517 4684 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978529 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978542 4684 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978554 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978565 4684 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978577 4684 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978589 4684 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978604 4684 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978616 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978628 4684 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978639 4684 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978651 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978663 4684 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978675 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978686 4684 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978701 4684 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978712 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978724 4684 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978742 4684 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978754 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978765 4684 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978777 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978791 4684 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978803 4684 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978817 4684 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978828 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978841 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978851 4684 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978863 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978875 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978888 4684 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978921 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978934 4684 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978945 4684 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978956 4684 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978968 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978979 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.978990 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979002 4684 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979013 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979025 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979036 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979048 4684 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979059 4684 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979071 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979084 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979097 4684 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979110 4684 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979122 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979148 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979163 4684 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979178 4684 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979190 4684 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979201 4684 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979214 4684 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979225 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979239 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: 
\"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979250 4684 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979261 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979274 4684 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979284 4684 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979296 4684 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979306 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979317 4684 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979327 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979340 4684 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979351 4684 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979362 4684 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979373 4684 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979386 4684 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: 
\"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979400 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979411 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979423 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979435 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979448 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979459 4684 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979471 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979483 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979495 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979508 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979519 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979530 4684 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979542 4684 reconciler_common.go:293] "Volume detached for volume 
\"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979563 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979575 4684 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979586 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979597 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979608 4684 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979619 4684 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979631 4684 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979643 4684 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979654 4684 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979665 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979675 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979687 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979698 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979712 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979726 4684 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979749 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979760 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979771 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979782 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979795 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979813 4684 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979824 4684 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979836 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979855 4684 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979868 4684 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979879 4684 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979891 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979920 4684 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979932 4684 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979942 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.979954 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.981082 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.982058 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.985625 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:07:45 crc kubenswrapper[4684]: I1013 13:07:45.995985 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.006840 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.045250 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.071172 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.080726 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ks5cd\" (UniqueName: \"kubernetes.io/projected/310416cc-40e9-4131-a1fc-535d397195b7-kube-api-access-ks5cd\") pod \"node-resolver-p9ngm\" (UID: \"310416cc-40e9-4131-a1fc-535d397195b7\") " pod="openshift-dns/node-resolver-p9ngm" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.080810 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/310416cc-40e9-4131-a1fc-535d397195b7-hosts-file\") pod \"node-resolver-p9ngm\" (UID: \"310416cc-40e9-4131-a1fc-535d397195b7\") " pod="openshift-dns/node-resolver-p9ngm" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.080957 4684 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.080976 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.080986 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.090329 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.103444 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.113319 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: 
connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.123376 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.132170 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.182004 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/310416cc-40e9-4131-a1fc-535d397195b7-hosts-file\") pod \"node-resolver-p9ngm\" (UID: \"310416cc-40e9-4131-a1fc-535d397195b7\") " pod="openshift-dns/node-resolver-p9ngm" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.182063 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ks5cd\" (UniqueName: \"kubernetes.io/projected/310416cc-40e9-4131-a1fc-535d397195b7-kube-api-access-ks5cd\") pod \"node-resolver-p9ngm\" (UID: \"310416cc-40e9-4131-a1fc-535d397195b7\") " pod="openshift-dns/node-resolver-p9ngm" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.182353 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/310416cc-40e9-4131-a1fc-535d397195b7-hosts-file\") pod \"node-resolver-p9ngm\" (UID: \"310416cc-40e9-4131-a1fc-535d397195b7\") " pod="openshift-dns/node-resolver-p9ngm" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.202516 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ks5cd\" (UniqueName: \"kubernetes.io/projected/310416cc-40e9-4131-a1fc-535d397195b7-kube-api-access-ks5cd\") pod \"node-resolver-p9ngm\" (UID: \"310416cc-40e9-4131-a1fc-535d397195b7\") " pod="openshift-dns/node-resolver-p9ngm" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.205185 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 13:07:46 crc kubenswrapper[4684]: W1013 13:07:46.216214 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-bbb91ecb6de908d31e4f3c0c0c1da5fb8c62c9f1ea0d1df6f95babc732f7785e WatchSource:0}: Error finding container bbb91ecb6de908d31e4f3c0c0c1da5fb8c62c9f1ea0d1df6f95babc732f7785e: Status 404 returned error can't find the container with id bbb91ecb6de908d31e4f3c0c0c1da5fb8c62c9f1ea0d1df6f95babc732f7785e Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.227783 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 13:07:46 crc kubenswrapper[4684]: W1013 13:07:46.242927 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-93cc38b258854c8d513b3dd823a78cecb4508d48675cd7708522777a93ba02d6 WatchSource:0}: Error finding container 93cc38b258854c8d513b3dd823a78cecb4508d48675cd7708522777a93ba02d6: Status 404 returned error can't find the container with id 93cc38b258854c8d513b3dd823a78cecb4508d48675cd7708522777a93ba02d6 Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.309648 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-p9ngm" Oct 13 13:07:46 crc kubenswrapper[4684]: W1013 13:07:46.324318 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod310416cc_40e9_4131_a1fc_535d397195b7.slice/crio-0f302000ce0eb81518172658bd58cf875a16561ff23ccbb44dd175e85c9eee4c WatchSource:0}: Error finding container 0f302000ce0eb81518172658bd58cf875a16561ff23ccbb44dd175e85c9eee4c: Status 404 returned error can't find the container with id 0f302000ce0eb81518172658bd58cf875a16561ff23ccbb44dd175e85c9eee4c Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.357970 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.358630 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.360301 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.361353 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.362466 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.363556 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.364207 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.365184 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.365850 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" 
path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.366787 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.367289 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.368460 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.369212 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\
\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:0
7:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.369351 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.370266 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.370865 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.371998 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.372598 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.373484 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.374085 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.375008 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.375491 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.376474 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.376929 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.378071 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.378492 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.379139 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod 
was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.379256 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.380546 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.381260 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.382264 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.382757 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.382916 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:07:46 crc kubenswrapper[4684]: E1013 13:07:46.383024 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:07:47.382997873 +0000 UTC m=+21.950381943 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.383116 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:46 crc kubenswrapper[4684]: E1013 13:07:46.383221 4684 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 13:07:46 crc kubenswrapper[4684]: E1013 13:07:46.383268 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 13:07:47.383261742 +0000 UTC m=+21.950645812 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.383633 4684 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.383735 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.385426 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.386432 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.387104 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.388603 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.389383 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.390429 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.391167 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.391982 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.392274 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.392756 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.393817 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.394613 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.395768 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.396346 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.397598 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.398636 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.399417 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.401109 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.401815 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" 
path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.402200 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.402764 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.403330 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.403971 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.405074 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.412461 4684 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.421183 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.433051 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.445091 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.484568 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.484619 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.484648 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:07:46 crc kubenswrapper[4684]: E1013 13:07:46.484803 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 13:07:46 crc kubenswrapper[4684]: E1013 13:07:46.484824 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 13:07:46 crc kubenswrapper[4684]: E1013 13:07:46.484836 4684 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:46 crc kubenswrapper[4684]: E1013 13:07:46.484861 4684 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 13:07:46 crc kubenswrapper[4684]: E1013 13:07:46.484912 4684 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-13 13:07:47.484878854 +0000 UTC m=+22.052262924 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:46 crc kubenswrapper[4684]: E1013 13:07:46.484954 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 13:07:47.484933646 +0000 UTC m=+22.052317716 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 13:07:46 crc kubenswrapper[4684]: E1013 13:07:46.484980 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 13:07:46 crc kubenswrapper[4684]: E1013 13:07:46.484992 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 13:07:46 crc kubenswrapper[4684]: E1013 13:07:46.484999 4684 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:46 crc kubenswrapper[4684]: E1013 13:07:46.485030 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-13 13:07:47.485023349 +0000 UTC m=+22.052407419 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.490169 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"93cc38b258854c8d513b3dd823a78cecb4508d48675cd7708522777a93ba02d6"} Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.492162 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277"} Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.492225 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"bbb91ecb6de908d31e4f3c0c0c1da5fb8c62c9f1ea0d1df6f95babc732f7785e"} Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.493893 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838"} Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.493946 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68"} Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.493964 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"534e19347964633559338c4f316f116f9a55285d8f1baa41926c888d8d1067df"} Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.495105 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-p9ngm" event={"ID":"310416cc-40e9-4131-a1fc-535d397195b7","Type":"ContainerStarted","Data":"0f302000ce0eb81518172658bd58cf875a16561ff23ccbb44dd175e85c9eee4c"} Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.497498 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.498983 4684 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174" exitCode=255 Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.499468 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174"} Oct 13 
13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.504357 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.515869 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.529049 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.547765 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runn
ing\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a
430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.562553 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.573776 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.584387 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.598250 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.609067 4684 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.622285 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.637858 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.640610 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.641529 4684 scope.go:117] "RemoveContainer" containerID="d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.653274 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.668750 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.680945 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.695591 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-r7wd2"] Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.695883 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-wns5s"] Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.696004 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.696136 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.696830 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-5w59x"] Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.697365 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.698379 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.698682 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.698722 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.699012 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.699019 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.699046 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.699083 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.701760 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.701825 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.701953 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.702039 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.702341 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.711669 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.723127 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.774261 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.807019 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.838369 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.861588 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.879059 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.889123 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-run-k8s-cni-cncf-io\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.889196 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e54ad64a-6df7-4082-afde-d56463121b3f-mcd-auth-proxy-config\") pod \"machine-config-daemon-wns5s\" (UID: \"e54ad64a-6df7-4082-afde-d56463121b3f\") " pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.889324 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-etc-kubernetes\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.889367 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-multus-socket-dir-parent\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.889391 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2dpv\" (UniqueName: \"kubernetes.io/projected/eb2c3381-fecf-46e7-a034-d3c560dff35e-kube-api-access-d2dpv\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.889452 4684 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2434fbe2-6014-4914-8ed3-c5d18e053150-tuning-conf-dir\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.889579 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2434fbe2-6014-4914-8ed3-c5d18e053150-cni-binary-copy\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.889670 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/eb2c3381-fecf-46e7-a034-d3c560dff35e-multus-daemon-config\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.889743 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-run-netns\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.889777 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2434fbe2-6014-4914-8ed3-c5d18e053150-os-release\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.889853 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7r6r4\" (UniqueName: \"kubernetes.io/projected/2434fbe2-6014-4914-8ed3-c5d18e053150-kube-api-access-7r6r4\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.889973 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-system-cni-dir\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.890010 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-os-release\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.890034 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-var-lib-cni-bin\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " 
pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.890064 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2434fbe2-6014-4914-8ed3-c5d18e053150-system-cni-dir\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.890119 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-var-lib-kubelet\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.890166 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-multus-cni-dir\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.890190 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-multus-conf-dir\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.890259 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-run-multus-certs\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.890307 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2434fbe2-6014-4914-8ed3-c5d18e053150-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.890332 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-cnibin\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.890413 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/eb2c3381-fecf-46e7-a034-d3c560dff35e-cni-binary-copy\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.890456 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-hostroot\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " 
pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.890524 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9jv8\" (UniqueName: \"kubernetes.io/projected/e54ad64a-6df7-4082-afde-d56463121b3f-kube-api-access-h9jv8\") pod \"machine-config-daemon-wns5s\" (UID: \"e54ad64a-6df7-4082-afde-d56463121b3f\") " pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.890606 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e54ad64a-6df7-4082-afde-d56463121b3f-proxy-tls\") pod \"machine-config-daemon-wns5s\" (UID: \"e54ad64a-6df7-4082-afde-d56463121b3f\") " pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.890632 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2434fbe2-6014-4914-8ed3-c5d18e053150-cnibin\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.890682 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/e54ad64a-6df7-4082-afde-d56463121b3f-rootfs\") pod \"machine-config-daemon-wns5s\" (UID: \"e54ad64a-6df7-4082-afde-d56463121b3f\") " pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.890759 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-var-lib-cni-multus\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.894366 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not 
be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.908224 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.922157 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.937427 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.959936 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6
ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.973636 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13
T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.988186 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.991584 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9jv8\" (UniqueName: \"kubernetes.io/projected/e54ad64a-6df7-4082-afde-d56463121b3f-kube-api-access-h9jv8\") pod \"machine-config-daemon-wns5s\" (UID: \"e54ad64a-6df7-4082-afde-d56463121b3f\") " pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.991646 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e54ad64a-6df7-4082-afde-d56463121b3f-proxy-tls\") pod \"machine-config-daemon-wns5s\" (UID: \"e54ad64a-6df7-4082-afde-d56463121b3f\") " pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.991685 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2434fbe2-6014-4914-8ed3-c5d18e053150-cnibin\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.991722 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: 
\"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-var-lib-cni-multus\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.991753 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/e54ad64a-6df7-4082-afde-d56463121b3f-rootfs\") pod \"machine-config-daemon-wns5s\" (UID: \"e54ad64a-6df7-4082-afde-d56463121b3f\") " pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.991794 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-run-k8s-cni-cncf-io\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.991822 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e54ad64a-6df7-4082-afde-d56463121b3f-mcd-auth-proxy-config\") pod \"machine-config-daemon-wns5s\" (UID: \"e54ad64a-6df7-4082-afde-d56463121b3f\") " pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.991850 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-var-lib-cni-multus\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.991871 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2434fbe2-6014-4914-8ed3-c5d18e053150-cnibin\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.991932 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/e54ad64a-6df7-4082-afde-d56463121b3f-rootfs\") pod \"machine-config-daemon-wns5s\" (UID: \"e54ad64a-6df7-4082-afde-d56463121b3f\") " pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.991917 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-etc-kubernetes\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.991859 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-etc-kubernetes\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992081 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-multus-socket-dir-parent\") pod 
\"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992120 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2dpv\" (UniqueName: \"kubernetes.io/projected/eb2c3381-fecf-46e7-a034-d3c560dff35e-kube-api-access-d2dpv\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992181 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2434fbe2-6014-4914-8ed3-c5d18e053150-tuning-conf-dir\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992196 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-multus-socket-dir-parent\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992208 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/eb2c3381-fecf-46e7-a034-d3c560dff35e-multus-daemon-config\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992246 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2434fbe2-6014-4914-8ed3-c5d18e053150-cni-binary-copy\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992269 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-run-netns\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992287 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2434fbe2-6014-4914-8ed3-c5d18e053150-os-release\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992307 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7r6r4\" (UniqueName: \"kubernetes.io/projected/2434fbe2-6014-4914-8ed3-c5d18e053150-kube-api-access-7r6r4\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992341 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-system-cni-dir\") pod \"multus-r7wd2\" (UID: 
\"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992360 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-os-release\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992379 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-var-lib-cni-bin\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992397 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2434fbe2-6014-4914-8ed3-c5d18e053150-system-cni-dir\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992417 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-var-lib-kubelet\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992439 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-multus-cni-dir\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992459 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-multus-conf-dir\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992477 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-run-multus-certs\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992495 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2434fbe2-6014-4914-8ed3-c5d18e053150-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992519 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-cnibin\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992533 4684 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/eb2c3381-fecf-46e7-a034-d3c560dff35e-cni-binary-copy\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992550 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-hostroot\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992606 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2434fbe2-6014-4914-8ed3-c5d18e053150-system-cni-dir\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992633 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-var-lib-kubelet\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992616 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-hostroot\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992720 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e54ad64a-6df7-4082-afde-d56463121b3f-mcd-auth-proxy-config\") pod \"machine-config-daemon-wns5s\" (UID: \"e54ad64a-6df7-4082-afde-d56463121b3f\") " pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992827 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-multus-cni-dir\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992862 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-multus-conf-dir\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.992884 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-run-multus-certs\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.993038 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-run-netns\") pod \"multus-r7wd2\" (UID: 
\"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.993119 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/eb2c3381-fecf-46e7-a034-d3c560dff35e-multus-daemon-config\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.993177 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-var-lib-cni-bin\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.993299 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-system-cni-dir\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.993324 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-os-release\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.993373 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-cnibin\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.993432 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2434fbe2-6014-4914-8ed3-c5d18e053150-os-release\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.993471 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2434fbe2-6014-4914-8ed3-c5d18e053150-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.993484 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2434fbe2-6014-4914-8ed3-c5d18e053150-cni-binary-copy\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.993783 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2434fbe2-6014-4914-8ed3-c5d18e053150-tuning-conf-dir\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.993808 4684 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/eb2c3381-fecf-46e7-a034-d3c560dff35e-host-run-k8s-cni-cncf-io\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.993977 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/eb2c3381-fecf-46e7-a034-d3c560dff35e-cni-binary-copy\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:46 crc kubenswrapper[4684]: I1013 13:07:46.998356 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e54ad64a-6df7-4082-afde-d56463121b3f-proxy-tls\") pod \"machine-config-daemon-wns5s\" (UID: \"e54ad64a-6df7-4082-afde-d56463121b3f\") " pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.008871 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9jv8\" (UniqueName: \"kubernetes.io/projected/e54ad64a-6df7-4082-afde-d56463121b3f-kube-api-access-h9jv8\") pod \"machine-config-daemon-wns5s\" (UID: \"e54ad64a-6df7-4082-afde-d56463121b3f\") " pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.009697 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2dpv\" (UniqueName: \"kubernetes.io/projected/eb2c3381-fecf-46e7-a034-d3c560dff35e-kube-api-access-d2dpv\") pod \"multus-r7wd2\" (UID: \"eb2c3381-fecf-46e7-a034-d3c560dff35e\") " pod="openshift-multus/multus-r7wd2" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.011624 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7r6r4\" (UniqueName: \"kubernetes.io/projected/2434fbe2-6014-4914-8ed3-c5d18e053150-kube-api-access-7r6r4\") pod \"multus-additional-cni-plugins-5w59x\" (UID: \"2434fbe2-6014-4914-8ed3-c5d18e053150\") " pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.013978 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-r7wd2" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.022136 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:07:47 crc kubenswrapper[4684]: W1013 13:07:47.031040 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb2c3381_fecf_46e7_a034_d3c560dff35e.slice/crio-6800f2eba1eb873ad620e2836fbfa960687bfe0a77b0dc895a9f85346c464280 WatchSource:0}: Error finding container 6800f2eba1eb873ad620e2836fbfa960687bfe0a77b0dc895a9f85346c464280: Status 404 returned error can't find the container with id 6800f2eba1eb873ad620e2836fbfa960687bfe0a77b0dc895a9f85346c464280 Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.037843 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-5w59x" Oct 13 13:07:47 crc kubenswrapper[4684]: W1013 13:07:47.060485 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2434fbe2_6014_4914_8ed3_c5d18e053150.slice/crio-e8e82e944cb1a850df981f13eb606bb49eb369d43bff233000ddd0269f30a7d7 WatchSource:0}: Error finding container e8e82e944cb1a850df981f13eb606bb49eb369d43bff233000ddd0269f30a7d7: Status 404 returned error can't find the container with id e8e82e944cb1a850df981f13eb606bb49eb369d43bff233000ddd0269f30a7d7 Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.094002 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-9sq8c"] Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.095038 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.103955 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.104021 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.103967 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.104430 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.104467 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.104546 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.104658 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.120943 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.134286 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.153575 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.174412 4684 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.194430 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-var-lib-openvswitch\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.194486 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-slash\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.194506 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-log-socket\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.194520 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-systemd-units\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.194535 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-node-log\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.194562 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-run-netns\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.194578 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-ovn\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.194692 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-env-overrides\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.194791 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovnkube-script-lib\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.194831 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-cni-bin\") pod \"ovnkube-node-9sq8c\" (UID: 
\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.194855 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovnkube-config\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.194886 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-run-ovn-kubernetes\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.194929 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovn-node-metrics-cert\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.194954 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-systemd\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.195006 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-kubelet\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.195029 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-openvswitch\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.195070 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-etc-openvswitch\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.195094 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-cni-netd\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.195137 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" 
(UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.195165 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnvbx\" (UniqueName: \"kubernetes.io/projected/9b180ad7-c68c-4234-9b7b-aa938e5ad590-kube-api-access-mnvbx\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.202330 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.228610 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.245848 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.260861 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.276110 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.293637 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.296514 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-kubelet\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.296611 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-kubelet\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.296686 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-openvswitch\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.296758 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-openvswitch\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.296767 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-etc-openvswitch\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.296815 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-etc-openvswitch\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.296844 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" 
(UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-cni-netd\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.296879 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.296940 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnvbx\" (UniqueName: \"kubernetes.io/projected/9b180ad7-c68c-4234-9b7b-aa938e5ad590-kube-api-access-mnvbx\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.296968 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-var-lib-openvswitch\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297001 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-slash\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297032 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-log-socket\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297055 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-systemd-units\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297076 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-node-log\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297050 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297129 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-run-netns\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297154 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-slash\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297109 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-run-netns\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297178 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-log-socket\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297199 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-systemd-units\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297243 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-node-log\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297269 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-ovn\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297266 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-ovn\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297342 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-env-overrides\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297329 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-var-lib-openvswitch\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297402 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovnkube-script-lib\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297440 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-cni-bin\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297463 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovnkube-config\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297495 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-run-ovn-kubernetes\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297518 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovn-node-metrics-cert\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297537 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-systemd\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297632 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-systemd\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297650 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-cni-netd\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.297913 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-cni-bin\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 
13:07:47.297957 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-run-ovn-kubernetes\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.298216 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-env-overrides\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.298258 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovnkube-config\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.298522 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovnkube-script-lib\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.303459 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovn-node-metrics-cert\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.310881 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.320208 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnvbx\" (UniqueName: \"kubernetes.io/projected/9b180ad7-c68c-4234-9b7b-aa938e5ad590-kube-api-access-mnvbx\") pod \"ovnkube-node-9sq8c\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.327062 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.348384 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.350029 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.350045 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.350043 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:07:47 crc kubenswrapper[4684]: E1013 13:07:47.350236 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:07:47 crc kubenswrapper[4684]: E1013 13:07:47.350390 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:07:47 crc kubenswrapper[4684]: E1013 13:07:47.350481 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.399054 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:07:47 crc kubenswrapper[4684]: E1013 13:07:47.399226 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:07:49.399196768 +0000 UTC m=+23.966580848 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.399324 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:47 crc kubenswrapper[4684]: E1013 13:07:47.399677 4684 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 13:07:47 crc kubenswrapper[4684]: E1013 13:07:47.399742 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 13:07:49.399729933 +0000 UTC m=+23.967114003 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.500069 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.500128 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.500183 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:07:47 crc kubenswrapper[4684]: E1013 13:07:47.500291 4684 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 13:07:47 crc kubenswrapper[4684]: E1013 13:07:47.500392 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 13:07:49.500368926 +0000 UTC m=+24.067752996 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 13:07:47 crc kubenswrapper[4684]: E1013 13:07:47.500303 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 13:07:47 crc kubenswrapper[4684]: E1013 13:07:47.500410 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 13:07:47 crc kubenswrapper[4684]: E1013 13:07:47.500421 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 13:07:47 crc kubenswrapper[4684]: E1013 13:07:47.500436 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 13:07:47 crc kubenswrapper[4684]: E1013 13:07:47.500450 4684 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:47 crc kubenswrapper[4684]: E1013 13:07:47.500458 4684 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:47 crc kubenswrapper[4684]: E1013 13:07:47.500500 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-13 13:07:49.50049072 +0000 UTC m=+24.067874790 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:47 crc kubenswrapper[4684]: E1013 13:07:47.500537 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-13 13:07:49.500526691 +0000 UTC m=+24.067910761 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.504438 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-p9ngm" event={"ID":"310416cc-40e9-4131-a1fc-535d397195b7","Type":"ContainerStarted","Data":"ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1"} Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.505756 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-r7wd2" event={"ID":"eb2c3381-fecf-46e7-a034-d3c560dff35e","Type":"ContainerStarted","Data":"41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85"} Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.505812 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-r7wd2" event={"ID":"eb2c3381-fecf-46e7-a034-d3c560dff35e","Type":"ContainerStarted","Data":"6800f2eba1eb873ad620e2836fbfa960687bfe0a77b0dc895a9f85346c464280"} Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.507984 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.510015 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038"} Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.510245 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.511382 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" event={"ID":"2434fbe2-6014-4914-8ed3-c5d18e053150","Type":"ContainerStarted","Data":"c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7"} Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.511413 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" event={"ID":"2434fbe2-6014-4914-8ed3-c5d18e053150","Type":"ContainerStarted","Data":"e8e82e944cb1a850df981f13eb606bb49eb369d43bff233000ddd0269f30a7d7"} Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.513882 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49"} Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.513928 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5"} Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.513940 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"fe833bde0f8721010a42aa2467b6cb6c1c9978ce0f0c7e587f2d1b95383ffa79"} Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.515421 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.525442 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: W1013 13:07:47.530576 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b180ad7_c68c_4234_9b7b_aa938e5ad590.slice/crio-2d185939a58a3231b24507b5d66c1e44a27830c1e6ff09fa64e1cd3477b40a44 WatchSource:0}: Error finding container 2d185939a58a3231b24507b5d66c1e44a27830c1e6ff09fa64e1cd3477b40a44: Status 404 returned error can't find the container with id 2d185939a58a3231b24507b5d66c1e44a27830c1e6ff09fa64e1cd3477b40a44 Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.546116 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.561826 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.580602 4684 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.598285 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.621841 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: 
I1013 13:07:47.637295 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\
\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.658947 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.673590 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.690759 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.705046 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.722277 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.744712 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8
b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8
f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.768110 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.806754 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.827516 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.853510 4684 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"l
astState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f
6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.873760 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.905318 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.926966 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.948747 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.973742 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:47 crc kubenswrapper[4684]: I1013 13:07:47.992712 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:47Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.007950 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.020734 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.039612 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.228938 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-fqh5v"] Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.229837 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-fqh5v" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.232640 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.232701 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.235051 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.235268 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.249010 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.263243 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.274834 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.288751 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.301339 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.309890 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7spsn\" (UniqueName: \"kubernetes.io/projected/ee89fde6-f464-42d7-aa10-83a15b2bf981-kube-api-access-7spsn\") pod \"node-ca-fqh5v\" (UID: \"ee89fde6-f464-42d7-aa10-83a15b2bf981\") " pod="openshift-image-registry/node-ca-fqh5v" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.309983 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ee89fde6-f464-42d7-aa10-83a15b2bf981-host\") pod \"node-ca-fqh5v\" (UID: \"ee89fde6-f464-42d7-aa10-83a15b2bf981\") " pod="openshift-image-registry/node-ca-fqh5v" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.310033 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ee89fde6-f464-42d7-aa10-83a15b2bf981-serviceca\") pod \"node-ca-fqh5v\" (UID: \"ee89fde6-f464-42d7-aa10-83a15b2bf981\") " pod="openshift-image-registry/node-ca-fqh5v" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.323089 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.335831 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.348874 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\
\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.359982 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.397489 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.410745 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7spsn\" (UniqueName: \"kubernetes.io/projected/ee89fde6-f464-42d7-aa10-83a15b2bf981-kube-api-access-7spsn\") pod \"node-ca-fqh5v\" (UID: \"ee89fde6-f464-42d7-aa10-83a15b2bf981\") " pod="openshift-image-registry/node-ca-fqh5v" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.411082 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ee89fde6-f464-42d7-aa10-83a15b2bf981-host\") pod \"node-ca-fqh5v\" (UID: \"ee89fde6-f464-42d7-aa10-83a15b2bf981\") " pod="openshift-image-registry/node-ca-fqh5v" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.411179 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ee89fde6-f464-42d7-aa10-83a15b2bf981-host\") pod \"node-ca-fqh5v\" (UID: \"ee89fde6-f464-42d7-aa10-83a15b2bf981\") " pod="openshift-image-registry/node-ca-fqh5v" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.411297 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ee89fde6-f464-42d7-aa10-83a15b2bf981-serviceca\") pod \"node-ca-fqh5v\" (UID: \"ee89fde6-f464-42d7-aa10-83a15b2bf981\") " pod="openshift-image-registry/node-ca-fqh5v" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.412467 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ee89fde6-f464-42d7-aa10-83a15b2bf981-serviceca\") pod \"node-ca-fqh5v\" (UID: \"ee89fde6-f464-42d7-aa10-83a15b2bf981\") " pod="openshift-image-registry/node-ca-fqh5v" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.429313 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.453646 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7spsn\" (UniqueName: \"kubernetes.io/projected/ee89fde6-f464-42d7-aa10-83a15b2bf981-kube-api-access-7spsn\") pod \"node-ca-fqh5v\" (UID: \"ee89fde6-f464-42d7-aa10-83a15b2bf981\") " pod="openshift-image-registry/node-ca-fqh5v" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.488104 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.518606 4684 generic.go:334] "Generic (PLEG): container finished" podID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerID="5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7" exitCode=0 Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.518719 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerDied","Data":"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7"} Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.518826 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerStarted","Data":"2d185939a58a3231b24507b5d66c1e44a27830c1e6ff09fa64e1cd3477b40a44"} Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.524667 4684 generic.go:334] "Generic (PLEG): container finished" podID="2434fbe2-6014-4914-8ed3-c5d18e053150" containerID="c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7" exitCode=0 Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.524763 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" 
event={"ID":"2434fbe2-6014-4914-8ed3-c5d18e053150","Type":"ContainerDied","Data":"c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7"} Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.532594 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.542325 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-fqh5v" Oct 13 13:07:48 crc kubenswrapper[4684]: W1013 13:07:48.563049 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee89fde6_f464_42d7_aa10_83a15b2bf981.slice/crio-b6a75fc31f63aad92bbae891ad9861331bf62b640b58e78e597ac036894953cc WatchSource:0}: Error finding container b6a75fc31f63aad92bbae891ad9861331bf62b640b58e78e597ac036894953cc: Status 404 returned error can't find the container with id b6a75fc31f63aad92bbae891ad9861331bf62b640b58e78e597ac036894953cc Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.568612 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.604995 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.651361 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z 
is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.687603 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.726091 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.766284 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.806535 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.858964 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.894600 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.928667 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:48 crc kubenswrapper[4684]: I1013 13:07:48.970389 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:48Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.007676 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.047688 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.086662 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.128948 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc 
kubenswrapper[4684]: I1013 13:07:49.350214 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.350329 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:07:49 crc kubenswrapper[4684]: E1013 13:07:49.350377 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.350407 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:07:49 crc kubenswrapper[4684]: E1013 13:07:49.350538 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:07:49 crc kubenswrapper[4684]: E1013 13:07:49.350646 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.420890 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.421013 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:49 crc kubenswrapper[4684]: E1013 13:07:49.421059 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:07:53.42103636 +0000 UTC m=+27.988420430 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:07:49 crc kubenswrapper[4684]: E1013 13:07:49.421096 4684 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 13:07:49 crc kubenswrapper[4684]: E1013 13:07:49.421131 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 13:07:53.421123383 +0000 UTC m=+27.988507443 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.521997 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.522068 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.522112 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:49 crc kubenswrapper[4684]: E1013 13:07:49.522213 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 13:07:49 crc kubenswrapper[4684]: E1013 13:07:49.522237 4684 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 13:07:49 crc kubenswrapper[4684]: E1013 13:07:49.522244 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 13:07:49 crc kubenswrapper[4684]: E1013 13:07:49.522260 4684 projected.go:194] Error preparing data for projected volume 
kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:49 crc kubenswrapper[4684]: E1013 13:07:49.522298 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 13:07:53.522280622 +0000 UTC m=+28.089664702 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 13:07:49 crc kubenswrapper[4684]: E1013 13:07:49.522317 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-13 13:07:53.522309043 +0000 UTC m=+28.089693123 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:49 crc kubenswrapper[4684]: E1013 13:07:49.522340 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 13:07:49 crc kubenswrapper[4684]: E1013 13:07:49.522351 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 13:07:49 crc kubenswrapper[4684]: E1013 13:07:49.522359 4684 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:49 crc kubenswrapper[4684]: E1013 13:07:49.522402 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-13 13:07:53.522384205 +0000 UTC m=+28.089768375 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.530062 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-fqh5v" event={"ID":"ee89fde6-f464-42d7-aa10-83a15b2bf981","Type":"ContainerStarted","Data":"cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd"} Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.530116 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-fqh5v" event={"ID":"ee89fde6-f464-42d7-aa10-83a15b2bf981","Type":"ContainerStarted","Data":"b6a75fc31f63aad92bbae891ad9861331bf62b640b58e78e597ac036894953cc"} Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.534530 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerStarted","Data":"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38"} Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.534588 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerStarted","Data":"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e"} Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.534598 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerStarted","Data":"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5"} Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.534607 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerStarted","Data":"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66"} Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.534617 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerStarted","Data":"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96"} Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.534626 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerStarted","Data":"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52"} Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.537054 4684 generic.go:334] "Generic (PLEG): container finished" podID="2434fbe2-6014-4914-8ed3-c5d18e053150" containerID="a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908" exitCode=0 Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.537182 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" event={"ID":"2434fbe2-6014-4914-8ed3-c5d18e053150","Type":"ContainerDied","Data":"a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908"} Oct 13 13:07:49 
crc kubenswrapper[4684]: I1013 13:07:49.538661 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc"} Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.548799 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.574920 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.614426 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k
8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.646200 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.673294 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.689128 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.702707 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.713358 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.723789 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.735150 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.748715 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 
13:07:49.761475 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.771947 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.786044 4684 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.799345 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.818083 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z 
is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.831095 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.844138 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.894139 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.927594 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:49 crc kubenswrapper[4684]: I1013 13:07:49.985591 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:49Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.014032 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.052626 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.090494 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.133389 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.174510 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.210932 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.252394 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.544519 4684 generic.go:334] "Generic (PLEG): container finished" podID="2434fbe2-6014-4914-8ed3-c5d18e053150" containerID="b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1" exitCode=0 Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.545677 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" event={"ID":"2434fbe2-6014-4914-8ed3-c5d18e053150","Type":"ContainerDied","Data":"b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1"} Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.563260 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.585617 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z 
is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.598917 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.625647 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.643451 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.658837 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.672663 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.685522 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.697342 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.711217 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.724880 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 
2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.736970 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.766139 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.778424 4684 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.782663 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.804740 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.827556 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.871858 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.906543 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.945328 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:50 crc kubenswrapper[4684]: I1013 13:07:50.987617 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:50Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.035388 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.066733 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.117858 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.153284 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.187547 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.228861 4684 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\
\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.271021 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.311425 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.349944 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.350006 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.350006 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:07:51 crc kubenswrapper[4684]: E1013 13:07:51.350134 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:07:51 crc kubenswrapper[4684]: E1013 13:07:51.350330 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:07:51 crc kubenswrapper[4684]: E1013 13:07:51.350448 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.353037 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.391218 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.433473 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node 
kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\
\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\
",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.564416 4684 generic.go:334] "Generic (PLEG): container finished" podID="2434fbe2-6014-4914-8ed3-c5d18e053150" containerID="2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da" exitCode=0 Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.564491 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" event={"ID":"2434fbe2-6014-4914-8ed3-c5d18e053150","Type":"ContainerDied","Data":"2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da"} Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.582368 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.601009 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.623855 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z 
is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.638466 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.650499 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.665944 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.707824 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.745363 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.794476 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.830000 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.868596 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.915759 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.949046 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhoo
k\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:51 crc kubenswrapper[4684]: I1013 13:07:51.989502 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:51Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.027570 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.165458 4684 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.168020 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.168089 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.168105 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.168252 4684 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.176479 4684 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.176888 4684 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.178232 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.178284 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.178294 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.178315 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.178328 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:52Z","lastTransitionTime":"2025-10-13T13:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:52 crc kubenswrapper[4684]: E1013 13:07:52.196841 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.202746 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.202801 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.202818 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.202842 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.202858 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:52Z","lastTransitionTime":"2025-10-13T13:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:52 crc kubenswrapper[4684]: E1013 13:07:52.217316 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.221099 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.221128 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.221137 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.221151 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.221161 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:52Z","lastTransitionTime":"2025-10-13T13:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:52 crc kubenswrapper[4684]: E1013 13:07:52.235708 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.240769 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.240813 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.240829 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.240851 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.240870 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:52Z","lastTransitionTime":"2025-10-13T13:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:52 crc kubenswrapper[4684]: E1013 13:07:52.259882 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.263843 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.263887 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.263913 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.263932 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.263945 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:52Z","lastTransitionTime":"2025-10-13T13:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:52 crc kubenswrapper[4684]: E1013 13:07:52.278780 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: E1013 13:07:52.279028 4684 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.280589 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.280659 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.280672 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.280727 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.280738 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:52Z","lastTransitionTime":"2025-10-13T13:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.384684 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.384790 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.384801 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.385115 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.385149 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:52Z","lastTransitionTime":"2025-10-13T13:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.487253 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.487308 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.487322 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.487342 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.487357 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:52Z","lastTransitionTime":"2025-10-13T13:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.573206 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerStarted","Data":"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7"} Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.577499 4684 generic.go:334] "Generic (PLEG): container finished" podID="2434fbe2-6014-4914-8ed3-c5d18e053150" containerID="5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318" exitCode=0 Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.577564 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" event={"ID":"2434fbe2-6014-4914-8ed3-c5d18e053150","Type":"ContainerDied","Data":"5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318"} Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.590171 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.590534 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.590557 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.590580 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.590594 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:52Z","lastTransitionTime":"2025-10-13T13:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.595757 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6
r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.612647 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.633635 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.646066 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.657965 4684 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952cab
a9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.672890 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.696096 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.696121 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.696129 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.696143 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.696152 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:52Z","lastTransitionTime":"2025-10-13T13:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.703873 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d321152
7077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.729070 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.744256 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.755810 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.772347 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.784955 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.802577 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.802632 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.802642 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:52 crc 
kubenswrapper[4684]: I1013 13:07:52.802661 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.802674 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:52Z","lastTransitionTime":"2025-10-13T13:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.815013 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containe
rID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc151875
4c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.831570 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.846296 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:52Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.905709 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.905755 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.905767 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.905782 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:52 crc kubenswrapper[4684]: I1013 13:07:52.905794 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:52Z","lastTransitionTime":"2025-10-13T13:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.008246 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.008285 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.008299 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.008315 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.008326 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:53Z","lastTransitionTime":"2025-10-13T13:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.112321 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.112373 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.112386 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.112404 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.112418 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:53Z","lastTransitionTime":"2025-10-13T13:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.215343 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.215373 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.215382 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.215395 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.215405 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:53Z","lastTransitionTime":"2025-10-13T13:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.318586 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.318620 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.318629 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.318645 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.318658 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:53Z","lastTransitionTime":"2025-10-13T13:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.349816 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.349854 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:07:53 crc kubenswrapper[4684]: E1013 13:07:53.350009 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.350058 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:07:53 crc kubenswrapper[4684]: E1013 13:07:53.350203 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:07:53 crc kubenswrapper[4684]: E1013 13:07:53.350465 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.421692 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.421752 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.421768 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.421790 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.421807 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:53Z","lastTransitionTime":"2025-10-13T13:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.471010 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.471209 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:53 crc kubenswrapper[4684]: E1013 13:07:53.471249 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:08:01.471218816 +0000 UTC m=+36.038602926 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:07:53 crc kubenswrapper[4684]: E1013 13:07:53.471368 4684 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 13:07:53 crc kubenswrapper[4684]: E1013 13:07:53.471463 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-10-13 13:08:01.471428843 +0000 UTC m=+36.038812963 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.526296 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.526351 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.526368 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.526391 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.526408 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:53Z","lastTransitionTime":"2025-10-13T13:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.573092 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.573195 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.573241 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:07:53 crc kubenswrapper[4684]: E1013 13:07:53.573372 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 13:07:53 crc kubenswrapper[4684]: E1013 13:07:53.573375 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 13:07:53 crc kubenswrapper[4684]: E1013 13:07:53.573429 4684 projected.go:288] Couldn't get configMap 
openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 13:07:53 crc kubenswrapper[4684]: E1013 13:07:53.573455 4684 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:53 crc kubenswrapper[4684]: E1013 13:07:53.573373 4684 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 13:07:53 crc kubenswrapper[4684]: E1013 13:07:53.573397 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 13:07:53 crc kubenswrapper[4684]: E1013 13:07:53.573587 4684 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:53 crc kubenswrapper[4684]: E1013 13:07:53.573525 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-13 13:08:01.573501629 +0000 UTC m=+36.140885739 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:53 crc kubenswrapper[4684]: E1013 13:07:53.573634 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-13 13:08:01.573616713 +0000 UTC m=+36.141000823 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:07:53 crc kubenswrapper[4684]: E1013 13:07:53.573656 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 13:08:01.573644874 +0000 UTC m=+36.141028984 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.586588 4684 generic.go:334] "Generic (PLEG): container finished" podID="2434fbe2-6014-4914-8ed3-c5d18e053150" containerID="61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6" exitCode=0 Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.586664 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" event={"ID":"2434fbe2-6014-4914-8ed3-c5d18e053150","Type":"ContainerDied","Data":"61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6"} Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.618917 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:53Z 
is after 2025-08-24T17:21:41Z" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.629033 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.629271 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.629393 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.629533 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.629643 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:53Z","lastTransitionTime":"2025-10-13T13:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.641152 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:53Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.662635 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:53Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.678698 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:53Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.698915 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:53Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.714163 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:53Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.732199 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.732244 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.732253 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:53 crc 
kubenswrapper[4684]: I1013 13:07:53.732273 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.732284 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:53Z","lastTransitionTime":"2025-10-13T13:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.737083 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containe
rID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc151875
4c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:53Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.753111 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:53Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.765519 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:53Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.782502 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:53Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.799175 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:53Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.815149 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:53Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.830735 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:53Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.835014 4684 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.835061 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.835071 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.835089 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.835102 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:53Z","lastTransitionTime":"2025-10-13T13:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.848075 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:53Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.860965 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:53Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.937487 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.937539 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.937549 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.937563 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:53 crc kubenswrapper[4684]: I1013 13:07:53.937572 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:53Z","lastTransitionTime":"2025-10-13T13:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.040058 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.040111 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.040124 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.040149 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.040227 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:54Z","lastTransitionTime":"2025-10-13T13:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.143303 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.143368 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.143378 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.143399 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.143413 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:54Z","lastTransitionTime":"2025-10-13T13:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.246713 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.246760 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.246770 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.246790 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.246802 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:54Z","lastTransitionTime":"2025-10-13T13:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.350041 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.350102 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.350116 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.350144 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.350162 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:54Z","lastTransitionTime":"2025-10-13T13:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.452950 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.453007 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.453019 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.453040 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.453054 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:54Z","lastTransitionTime":"2025-10-13T13:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.556066 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.556106 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.556116 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.556135 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.556151 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:54Z","lastTransitionTime":"2025-10-13T13:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.594920 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" event={"ID":"2434fbe2-6014-4914-8ed3-c5d18e053150","Type":"ContainerStarted","Data":"37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae"} Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.599626 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerStarted","Data":"37530a647eec6cc9261d77f5aed836b9d879599f483370272c7d101c31e5553e"} Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.600137 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.611288 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.627933 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.630288 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.654793 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z 
is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.658524 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.658592 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.658611 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.658635 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.658653 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:54Z","lastTransitionTime":"2025-10-13T13:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.665828 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: 
I1013 13:07:54.676393 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.694677 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.706444 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.726542 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.748947 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.761692 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.761762 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.761783 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.761811 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.761832 4684 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:54Z","lastTransitionTime":"2025-10-13T13:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.771268 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.787699 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.803139 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.819680 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.834038 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.862585 4684 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.864725 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.864807 4684 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.864827 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.864855 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.864880 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:54Z","lastTransitionTime":"2025-10-13T13:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.880826 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\
\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\
\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.903787 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acces
s-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.925583 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.946005 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.966610 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\
\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.967967 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.968034 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.968054 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.968080 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.968099 4684 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:54Z","lastTransitionTime":"2025-10-13T13:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:54 crc kubenswrapper[4684]: I1013 13:07:54.986974 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.027266 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37530a647eec6cc9261d77f5aed836b9d879599f
483370272c7d101c31e5553e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.046968 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.062227 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.070173 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.070251 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.070274 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.070303 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.070326 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:55Z","lastTransitionTime":"2025-10-13T13:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.076743 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc 
kubenswrapper[4684]: I1013 13:07:55.093300 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.107251 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.142005 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.161685 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.173448 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.173498 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.173510 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.173525 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.173536 4684 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:55Z","lastTransitionTime":"2025-10-13T13:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.180805 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.277183 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.277262 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.277285 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.277315 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.277338 4684 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:55Z","lastTransitionTime":"2025-10-13T13:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.350461 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.350464 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:55 crc kubenswrapper[4684]: E1013 13:07:55.350692 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.350495 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:07:55 crc kubenswrapper[4684]: E1013 13:07:55.350835 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:07:55 crc kubenswrapper[4684]: E1013 13:07:55.351134 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.381175 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.381242 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.381256 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.381282 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.381301 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:55Z","lastTransitionTime":"2025-10-13T13:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.485034 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.485128 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.485149 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.485182 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.485209 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:55Z","lastTransitionTime":"2025-10-13T13:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.587842 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.587942 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.587962 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.587991 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.588013 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:55Z","lastTransitionTime":"2025-10-13T13:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.604398 4684 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.605162 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.629034 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.646670 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.661546 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.682747 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37530a647eec6cc9261d77f5aed836b9d879599f
483370272c7d101c31e5553e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.691652 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.691743 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.691757 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.691782 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.691800 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:55Z","lastTransitionTime":"2025-10-13T13:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.698014 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.725985 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.742428 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.758386 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.779363 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.795021 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.795177 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.795239 4684 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.795264 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.795297 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.795321 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:55Z","lastTransitionTime":"2025-10-13T13:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.809737 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z" 
Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.829601 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z"
Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.846957 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z"
Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.862236 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z"
Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.877405 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z"
Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.897360 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.897428 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.897442 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.897462 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.897477 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:55Z","lastTransitionTime":"2025-10-13T13:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:07:55 crc kubenswrapper[4684]: I1013 13:07:55.900340 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:55Z is after 2025-08-24T17:21:41Z"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.000827 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.000898 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.000925 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.000949 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.000961 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:56Z","lastTransitionTime":"2025-10-13T13:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.117508 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.117538 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.117545 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.117558 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.117566 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:56Z","lastTransitionTime":"2025-10-13T13:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.220053 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.220089 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.220101 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.220116 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.220127 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:56Z","lastTransitionTime":"2025-10-13T13:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.323272 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.323704 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.323715 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.323734 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.323746 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:56Z","lastTransitionTime":"2025-10-13T13:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.369211 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37530a647eec6cc9261d77f5aed836b9d879599f483370272c7d101c31e5553e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:56Z is after 2025-08-24T17:21:41Z"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.386696 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:56Z is after 2025-08-24T17:21:41Z"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.404517 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:56Z is after 2025-08-24T17:21:41Z"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.422122 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:56Z is after 2025-08-24T17:21:41Z"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.426178 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.426229 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.426245 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.426266 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.426281 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:56Z","lastTransitionTime":"2025-10-13T13:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.436027 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:56Z is after 2025-08-24T17:21:41Z"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.457445 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:56Z is after 2025-08-24T17:21:41Z"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.471360 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:56Z is after 2025-08-24T17:21:41Z"
Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.497181 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.510793 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.521989 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.528118 4684 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.528181 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.528195 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.528213 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.528228 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:56Z","lastTransitionTime":"2025-10-13T13:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.541080 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.555556 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/
ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.572685 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.586663 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.597363 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.607082 4684 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.632404 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.632617 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.632677 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.632736 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.632827 4684 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:56Z","lastTransitionTime":"2025-10-13T13:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.735451 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.735498 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.735510 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.735529 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.735544 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:56Z","lastTransitionTime":"2025-10-13T13:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.838331 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.838370 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.838411 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.838429 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.838440 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:56Z","lastTransitionTime":"2025-10-13T13:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.941516 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.941566 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.941578 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.941597 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:56 crc kubenswrapper[4684]: I1013 13:07:56.941611 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:56Z","lastTransitionTime":"2025-10-13T13:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.044118 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.044179 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.044196 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.044221 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.044236 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:57Z","lastTransitionTime":"2025-10-13T13:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.146420 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.146491 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.146514 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.146537 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.146553 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:57Z","lastTransitionTime":"2025-10-13T13:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.249328 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.249364 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.249376 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.249391 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.249401 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:57Z","lastTransitionTime":"2025-10-13T13:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.350351 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.350349 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:07:57 crc kubenswrapper[4684]: E1013 13:07:57.350515 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:07:57 crc kubenswrapper[4684]: E1013 13:07:57.350651 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.350726 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:57 crc kubenswrapper[4684]: E1013 13:07:57.350892 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.353008 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.353052 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.353068 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.353089 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.353105 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:57Z","lastTransitionTime":"2025-10-13T13:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.455856 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.455933 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.455946 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.455963 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.455975 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:57Z","lastTransitionTime":"2025-10-13T13:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.559578 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.559645 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.559665 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.559691 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.559710 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:57Z","lastTransitionTime":"2025-10-13T13:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.616098 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovnkube-controller/0.log" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.620386 4684 generic.go:334] "Generic (PLEG): container finished" podID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerID="37530a647eec6cc9261d77f5aed836b9d879599f483370272c7d101c31e5553e" exitCode=1 Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.620457 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerDied","Data":"37530a647eec6cc9261d77f5aed836b9d879599f483370272c7d101c31e5553e"} Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.621463 4684 scope.go:117] "RemoveContainer" containerID="37530a647eec6cc9261d77f5aed836b9d879599f483370272c7d101c31e5553e" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.641879 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:57Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.661972 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:57Z is after 2025-08-24T17:21:41Z"
Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.662651 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.662700 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.662716 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.662736 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.662749 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:57Z","lastTransitionTime":"2025-10-13T13:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.695231 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37530a647eec6cc9261d77f5aed836b9d879599f483370272c7d101c31e5553e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37530a647eec6cc9261d77f5aed836b9d879599f483370272c7d101c31e5553e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:07:56Z\\\",\\\"message\\\":\\\"013 13:07:56.678561 5968 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 13:07:56.678581 5968 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 13:07:56.678590 5968 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 13:07:56.678945 5968 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 13:07:56.678970 5968 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 13:07:56.678989 5968 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 13:07:56.678995 5968 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 13:07:56.679000 5968 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 13:07:56.679016 5968 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 13:07:56.679023 5968 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 13:07:56.679061 5968 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 13:07:56.679080 5968 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 13:07:56.679095 5968 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 13:07:56.679104 5968 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 13:07:56.679104 5968 factory.go:656] Stopping watch factory\\\\nI1013 13:07:56.679115 5968 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:57Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.712588 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:57Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.734915 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\
":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:57Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.749610 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:57Z is after 2025-08-24T17:21:41Z"
Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.765741 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.765805 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.765835 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.765866 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.765958 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:57Z","lastTransitionTime":"2025-10-13T13:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.774395 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33
e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"qu
ay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:57Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.790168 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:57Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.803723 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:57Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.825265 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:57Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.842391 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:57Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.857401 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:57Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.869061 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.869118 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.869140 4684 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.869166 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.869185 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:57Z","lastTransitionTime":"2025-10-13T13:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.875500 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:57Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.886737 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:57Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.906590 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\
"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z
\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:57Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.972252 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.972305 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.972321 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.972342 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:57 crc kubenswrapper[4684]: I1013 13:07:57.972354 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:57Z","lastTransitionTime":"2025-10-13T13:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.032560 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.074713 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.074785 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.074800 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.074824 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.074841 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:58Z","lastTransitionTime":"2025-10-13T13:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.177453 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.177486 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.177494 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.177510 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.177520 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:58Z","lastTransitionTime":"2025-10-13T13:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.279313 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.279355 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.279367 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.279383 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.279394 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:58Z","lastTransitionTime":"2025-10-13T13:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.381557 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.381622 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.381634 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.381658 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.381673 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:58Z","lastTransitionTime":"2025-10-13T13:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.484178 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.484218 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.484227 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.484240 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.484249 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:58Z","lastTransitionTime":"2025-10-13T13:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.586346 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.586800 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.586812 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.586824 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.586833 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:58Z","lastTransitionTime":"2025-10-13T13:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.623931 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovnkube-controller/1.log" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.624654 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovnkube-controller/0.log" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.626803 4684 generic.go:334] "Generic (PLEG): container finished" podID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerID="6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be" exitCode=1 Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.626849 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerDied","Data":"6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be"} Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.626937 4684 scope.go:117] "RemoveContainer" containerID="37530a647eec6cc9261d77f5aed836b9d879599f483370272c7d101c31e5553e" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.627752 4684 scope.go:117] "RemoveContainer" containerID="6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be" Oct 13 13:07:58 crc kubenswrapper[4684]: E1013 13:07:58.627940 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.646754 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c977
64991263bbe0f932b352c2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37530a647eec6cc9261d77f5aed836b9d879599f483370272c7d101c31e5553e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:07:56Z\\\",\\\"message\\\":\\\"013 13:07:56.678561 5968 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 13:07:56.678581 5968 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 13:07:56.678590 5968 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 13:07:56.678945 5968 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 13:07:56.678970 5968 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 13:07:56.678989 5968 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 13:07:56.678995 5968 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 13:07:56.679000 5968 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 13:07:56.679016 5968 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 13:07:56.679023 5968 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 13:07:56.679061 5968 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 13:07:56.679080 5968 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 13:07:56.679095 5968 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 13:07:56.679104 5968 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 13:07:56.679104 5968 factory.go:656] Stopping watch factory\\\\nI1013 13:07:56.679115 5968 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"message\\\":\\\"\\\\nI1013 13:07:58.561979 6106 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 13:07:58.561994 6106 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 13:07:58.562010 6106 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 13:07:58.562019 6106 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 13:07:58.562018 6106 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 13:07:58.562020 6106 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 13:07:58.562032 6106 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 13:07:58.562025 6106 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 13:07:58.562039 6106 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 13:07:58.562037 6106 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 13:07:58.562047 6106 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 13:07:58.562079 6106 factory.go:656] Stopping watch factory\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 13:07:58.562099 6106 ovnkube.go:599] Stopped ovnkube\\\\nI1013 13:07:58.562094 
6106 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 13:07:58.562115 6106 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 13:07:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f
cc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.659811 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"
name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.670201 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.688854 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.690266 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.690350 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.690406 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.690434 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.690452 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:58Z","lastTransitionTime":"2025-10-13T13:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.704497 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.716376 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.737052 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.750878 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.761727 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.774329 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.784393 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb"] Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.784773 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.786856 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.788067 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.792403 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.792450 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.792465 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.792483 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.792497 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:58Z","lastTransitionTime":"2025-10-13T13:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.799972 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.824693 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.837983 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/035a2687-3587-4656-887b-f8de9008fbfe-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fgzqb\" (UID: \"035a2687-3587-4656-887b-f8de9008fbfe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.838033 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/035a2687-3587-4656-887b-f8de9008fbfe-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fgzqb\" (UID: \"035a2687-3587-4656-887b-f8de9008fbfe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.838069 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4dfv\" (UniqueName: \"kubernetes.io/projected/035a2687-3587-4656-887b-f8de9008fbfe-kube-api-access-g4dfv\") pod \"ovnkube-control-plane-749d76644c-fgzqb\" (UID: \"035a2687-3587-4656-887b-f8de9008fbfe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.838102 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/035a2687-3587-4656-887b-f8de9008fbfe-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fgzqb\" (UID: \"035a2687-3587-4656-887b-f8de9008fbfe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.848806 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.863566 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.878063 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.886660 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.895039 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.895069 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.895079 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.895093 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.895103 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:58Z","lastTransitionTime":"2025-10-13T13:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.897835 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":
\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.907222 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.935807 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.938397 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/035a2687-3587-4656-887b-f8de9008fbfe-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fgzqb\" (UID: \"035a2687-3587-4656-887b-f8de9008fbfe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.938438 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/035a2687-3587-4656-887b-f8de9008fbfe-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fgzqb\" (UID: \"035a2687-3587-4656-887b-f8de9008fbfe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.938479 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4dfv\" (UniqueName: \"kubernetes.io/projected/035a2687-3587-4656-887b-f8de9008fbfe-kube-api-access-g4dfv\") pod \"ovnkube-control-plane-749d76644c-fgzqb\" (UID: \"035a2687-3587-4656-887b-f8de9008fbfe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.938516 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/035a2687-3587-4656-887b-f8de9008fbfe-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fgzqb\" (UID: \"035a2687-3587-4656-887b-f8de9008fbfe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.939052 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/035a2687-3587-4656-887b-f8de9008fbfe-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fgzqb\" (UID: 
\"035a2687-3587-4656-887b-f8de9008fbfe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.939202 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/035a2687-3587-4656-887b-f8de9008fbfe-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fgzqb\" (UID: \"035a2687-3587-4656-887b-f8de9008fbfe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.945655 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/035a2687-3587-4656-887b-f8de9008fbfe-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fgzqb\" (UID: \"035a2687-3587-4656-887b-f8de9008fbfe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.951887 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.955878 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4dfv\" (UniqueName: \"kubernetes.io/projected/035a2687-3587-4656-887b-f8de9008fbfe-kube-api-access-g4dfv\") pod \"ovnkube-control-plane-749d76644c-fgzqb\" (UID: \"035a2687-3587-4656-887b-f8de9008fbfe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.970826 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.986943 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:58Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.997580 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.997628 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.997641 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.997659 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:58 crc kubenswrapper[4684]: I1013 13:07:58.997672 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:58Z","lastTransitionTime":"2025-10-13T13:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.007119 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.025839 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.040671 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.057059 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.076382 4684 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.093683 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.098557 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.099590 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.099662 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.099736 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.099812 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.099868 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:59Z","lastTransitionTime":"2025-10-13T13:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.110774 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: W1013 13:07:59.116126 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod035a2687_3587_4656_887b_f8de9008fbfe.slice/crio-9c8b0bca6f0d2045109a7da88e39c5ebe67a0d423d4aa76edf3e8c8f45ed0dbc WatchSource:0}: Error finding container 9c8b0bca6f0d2045109a7da88e39c5ebe67a0d423d4aa76edf3e8c8f45ed0dbc: Status 404 returned error can't find the container with id 9c8b0bca6f0d2045109a7da88e39c5ebe67a0d423d4aa76edf3e8c8f45ed0dbc Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.146965 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37530a647eec6cc9261d77f5aed836b9d879599f483370272c7d101c31e5553e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:07:56Z\\\",\\\"message\\\":\\\"013 13:07:56.678561 5968 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 13:07:56.678581 5968 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 13:07:56.678590 5968 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 13:07:56.678945 5968 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 13:07:56.678970 5968 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 13:07:56.678989 5968 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 13:07:56.678995 5968 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 13:07:56.679000 5968 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 13:07:56.679016 5968 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 13:07:56.679023 5968 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 13:07:56.679061 5968 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 13:07:56.679080 5968 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 13:07:56.679095 5968 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 13:07:56.679104 5968 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 13:07:56.679104 5968 factory.go:656] Stopping watch factory\\\\nI1013 13:07:56.679115 5968 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"message\\\":\\\"\\\\nI1013 13:07:58.561979 6106 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 13:07:58.561994 6106 handler.go:190] Sending *v1.NetworkPolicy event handler 
4 for removal\\\\nI1013 13:07:58.562010 6106 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 13:07:58.562019 6106 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 13:07:58.562018 6106 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 13:07:58.562020 6106 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 13:07:58.562032 6106 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 13:07:58.562025 6106 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 13:07:58.562039 6106 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 13:07:58.562037 6106 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 13:07:58.562047 6106 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 13:07:58.562079 6106 factory.go:656] Stopping watch factory\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 13:07:58.562099 6106 ovnkube.go:599] Stopped ovnkube\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 13:07:58.562115 6106 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 13:07:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.163691 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.203992 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:59 crc 
kubenswrapper[4684]: I1013 13:07:59.204033 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.204047 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.204066 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.204081 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:59Z","lastTransitionTime":"2025-10-13T13:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.307038 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.307145 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.307205 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.307233 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.307291 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:59Z","lastTransitionTime":"2025-10-13T13:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.349880 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.350020 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:07:59 crc kubenswrapper[4684]: E1013 13:07:59.350060 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.350020 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:07:59 crc kubenswrapper[4684]: E1013 13:07:59.350227 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:07:59 crc kubenswrapper[4684]: E1013 13:07:59.350305 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.410102 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.410153 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.410166 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.410188 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.410202 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:59Z","lastTransitionTime":"2025-10-13T13:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.513002 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.513080 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.513101 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.514351 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.514401 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:59Z","lastTransitionTime":"2025-10-13T13:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.617555 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.617606 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.617618 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.617634 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.617645 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:59Z","lastTransitionTime":"2025-10-13T13:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.633127 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" event={"ID":"035a2687-3587-4656-887b-f8de9008fbfe","Type":"ContainerStarted","Data":"b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210"} Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.634064 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" event={"ID":"035a2687-3587-4656-887b-f8de9008fbfe","Type":"ContainerStarted","Data":"6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc"} Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.634117 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" event={"ID":"035a2687-3587-4656-887b-f8de9008fbfe","Type":"ContainerStarted","Data":"9c8b0bca6f0d2045109a7da88e39c5ebe67a0d423d4aa76edf3e8c8f45ed0dbc"} Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.635092 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovnkube-controller/1.log" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.638935 4684 scope.go:117] "RemoveContainer" containerID="6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be" Oct 13 13:07:59 crc kubenswrapper[4684]: E1013 13:07:59.639094 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.663292 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37530a647eec6cc9261d77f5aed836b9d879599f483370272c7d101c31e5553e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:07:56Z\\\",\\\"message\\\":\\\"013 13:07:56.678561 5968 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 13:07:56.678581 5968 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 13:07:56.678590 5968 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 13:07:56.678945 5968 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 13:07:56.678970 5968 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 13:07:56.678989 5968 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 13:07:56.678995 5968 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 13:07:56.679000 5968 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 13:07:56.679016 5968 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 13:07:56.679023 5968 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 13:07:56.679061 5968 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 13:07:56.679080 5968 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 13:07:56.679095 5968 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 13:07:56.679104 5968 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 13:07:56.679104 5968 factory.go:656] Stopping watch factory\\\\nI1013 13:07:56.679115 5968 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"message\\\":\\\"\\\\nI1013 13:07:58.561979 6106 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 13:07:58.561994 6106 handler.go:190] Sending *v1.NetworkPolicy event handler 
4 for removal\\\\nI1013 13:07:58.562010 6106 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 13:07:58.562019 6106 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 13:07:58.562018 6106 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 13:07:58.562020 6106 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 13:07:58.562032 6106 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 13:07:58.562025 6106 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 13:07:58.562039 6106 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 13:07:58.562037 6106 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 13:07:58.562047 6106 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 13:07:58.562079 6106 factory.go:656] Stopping watch factory\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 13:07:58.562099 6106 ovnkube.go:599] Stopped ovnkube\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 13:07:58.562115 6106 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 13:07:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.673459 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 
13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.685302 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.696400 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.715546 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.719545 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.719582 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.719591 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.719607 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.719624 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:59Z","lastTransitionTime":"2025-10-13T13:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.729231 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.744245 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.759735 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.769435 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.777607 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.787605 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.798193 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.809717 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.821863 4684 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.821930 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.821946 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.821963 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.821975 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:59Z","lastTransitionTime":"2025-10-13T13:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.821996 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.832565 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.842595 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.853928 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.865153 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.877165 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.892282 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\
"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z
\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.905856 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"moun
tPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.917159 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.924358 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.924420 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.924443 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.924472 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.924491 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:07:59Z","lastTransitionTime":"2025-10-13T13:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.943816 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"message\\\":\\\"\\\\nI1013 13:07:58.561979 6106 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 13:07:58.561994 6106 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 13:07:58.562010 6106 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 13:07:58.562019 6106 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 13:07:58.562018 6106 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 13:07:58.562020 6106 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 13:07:58.562032 6106 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 13:07:58.562025 6106 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 13:07:58.562039 6106 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 13:07:58.562037 6106 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 13:07:58.562047 6106 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 13:07:58.562079 6106 factory.go:656] Stopping watch factory\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 13:07:58.562099 6106 ovnkube.go:599] Stopped ovnkube\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 13:07:58.562115 6106 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 13:07:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.956030 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.967154 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.980008 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:07:59 crc kubenswrapper[4684]: I1013 13:07:59.992176 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:07:59Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.012464 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.026868 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.026914 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.026924 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.026938 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.026949 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:00Z","lastTransitionTime":"2025-10-13T13:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.034974 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.058426 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.083764 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.105431 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.129710 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.129766 4684 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.129782 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.129804 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.129823 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:00Z","lastTransitionTime":"2025-10-13T13:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.233307 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.233374 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.233395 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.233424 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.233446 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:00Z","lastTransitionTime":"2025-10-13T13:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.312245 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-mlkgd"] Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.313105 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:00 crc kubenswrapper[4684]: E1013 13:08:00.313205 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.336834 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.336952 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.336980 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.337027 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.337054 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:00Z","lastTransitionTime":"2025-10-13T13:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.340276 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c977
64991263bbe0f932b352c2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"message\\\":\\\"\\\\nI1013 13:07:58.561979 6106 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 13:07:58.561994 6106 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 13:07:58.562010 6106 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 13:07:58.562019 6106 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 13:07:58.562018 6106 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 13:07:58.562020 6106 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 13:07:58.562032 6106 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 13:07:58.562025 6106 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 13:07:58.562039 6106 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 13:07:58.562037 6106 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 13:07:58.562047 6106 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 13:07:58.562079 6106 factory.go:656] Stopping watch factory\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 13:07:58.562099 6106 ovnkube.go:599] Stopped ovnkube\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 13:07:58.562115 6106 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 13:07:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.352114 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2t2q\" (UniqueName: \"kubernetes.io/projected/9150445c-49fc-46c8-b101-d672f0485cbb-kube-api-access-z2t2q\") pod \"network-metrics-daemon-mlkgd\" (UID: \"9150445c-49fc-46c8-b101-d672f0485cbb\") " pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.352238 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs\") pod \"network-metrics-daemon-mlkgd\" (UID: \"9150445c-49fc-46c8-b101-d672f0485cbb\") " pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.355592 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 
13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.374007 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.389346 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.422173 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.439998 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.440115 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.440139 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.440171 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.440199 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:00Z","lastTransitionTime":"2025-10-13T13:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.449545 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.453420 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2t2q\" (UniqueName: \"kubernetes.io/projected/9150445c-49fc-46c8-b101-d672f0485cbb-kube-api-access-z2t2q\") pod \"network-metrics-daemon-mlkgd\" (UID: \"9150445c-49fc-46c8-b101-d672f0485cbb\") " pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.453511 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs\") pod \"network-metrics-daemon-mlkgd\" (UID: \"9150445c-49fc-46c8-b101-d672f0485cbb\") " pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:00 crc kubenswrapper[4684]: E1013 
13:08:00.453779 4684 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 13:08:00 crc kubenswrapper[4684]: E1013 13:08:00.453882 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs podName:9150445c-49fc-46c8-b101-d672f0485cbb nodeName:}" failed. No retries permitted until 2025-10-13 13:08:00.953855584 +0000 UTC m=+35.521239684 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs") pod "network-metrics-daemon-mlkgd" (UID: "9150445c-49fc-46c8-b101-d672f0485cbb") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.469701 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.476953 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2t2q\" (UniqueName: \"kubernetes.io/projected/9150445c-49fc-46c8-b101-d672f0485cbb-kube-api-access-z2t2q\") pod \"network-metrics-daemon-mlkgd\" (UID: \"9150445c-49fc-46c8-b101-d672f0485cbb\") " 
pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.487053 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.505879 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.522430 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.543704 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.543755 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.543772 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.543792 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.543808 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:00Z","lastTransitionTime":"2025-10-13T13:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.546886 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.568926 4684 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.588043 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.608021 4684 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.621362 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.643564 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.646861 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.646954 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.646981 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.647019 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.647040 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:00Z","lastTransitionTime":"2025-10-13T13:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.670041 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:00Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.750090 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.750164 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.750184 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.750210 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.750229 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:00Z","lastTransitionTime":"2025-10-13T13:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.853528 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.853600 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.853612 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.853642 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.853659 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:00Z","lastTransitionTime":"2025-10-13T13:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.956491 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.956538 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.956550 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.956567 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.956582 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:00Z","lastTransitionTime":"2025-10-13T13:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:00 crc kubenswrapper[4684]: I1013 13:08:00.959342 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs\") pod \"network-metrics-daemon-mlkgd\" (UID: \"9150445c-49fc-46c8-b101-d672f0485cbb\") " pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:00 crc kubenswrapper[4684]: E1013 13:08:00.959542 4684 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 13:08:00 crc kubenswrapper[4684]: E1013 13:08:00.959609 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs podName:9150445c-49fc-46c8-b101-d672f0485cbb nodeName:}" failed. No retries permitted until 2025-10-13 13:08:01.959589827 +0000 UTC m=+36.526973917 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs") pod "network-metrics-daemon-mlkgd" (UID: "9150445c-49fc-46c8-b101-d672f0485cbb") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.060099 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.060164 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.060181 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.060205 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.060223 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:01Z","lastTransitionTime":"2025-10-13T13:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.163487 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.163557 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.163578 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.163607 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.163632 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:01Z","lastTransitionTime":"2025-10-13T13:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.266963 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.267038 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.267061 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.267128 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.267156 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:01Z","lastTransitionTime":"2025-10-13T13:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.349719 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.349719 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.349936 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.350151 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.350343 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.350526 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.369209 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.369258 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.369271 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.369289 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.369300 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:01Z","lastTransitionTime":"2025-10-13T13:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.471730 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.471775 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.471785 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.471803 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.471813 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:01Z","lastTransitionTime":"2025-10-13T13:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.566047 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.566316 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:08:17.566272074 +0000 UTC m=+52.133656204 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.566388 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.566528 4684 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.566614 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 13:08:17.566592454 +0000 UTC m=+52.133976554 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.575377 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.575476 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.575504 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.575536 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.575567 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:01Z","lastTransitionTime":"2025-10-13T13:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.667420 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.667504 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.667564 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.667718 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.667783 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.667816 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.667834 4684 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.667794 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.667958 4684 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.667752 4684 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.667929 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl 
podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-13 13:08:17.667878277 +0000 UTC m=+52.235262377 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.668077 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-13 13:08:17.668055743 +0000 UTC m=+52.235439943 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.668100 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 13:08:17.668094844 +0000 UTC m=+52.235478914 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.678803 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.679268 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.679284 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.679304 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.679319 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:01Z","lastTransitionTime":"2025-10-13T13:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.782528 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.782610 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.782634 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.782664 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.782688 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:01Z","lastTransitionTime":"2025-10-13T13:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.886225 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.886287 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.886306 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.886326 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.886338 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:01Z","lastTransitionTime":"2025-10-13T13:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.972079 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs\") pod \"network-metrics-daemon-mlkgd\" (UID: \"9150445c-49fc-46c8-b101-d672f0485cbb\") " pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.972360 4684 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 13:08:01 crc kubenswrapper[4684]: E1013 13:08:01.972486 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs podName:9150445c-49fc-46c8-b101-d672f0485cbb nodeName:}" failed. No retries permitted until 2025-10-13 13:08:03.972452868 +0000 UTC m=+38.539836978 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs") pod "network-metrics-daemon-mlkgd" (UID: "9150445c-49fc-46c8-b101-d672f0485cbb") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.989121 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.989179 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.989195 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.989219 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:01 crc kubenswrapper[4684]: I1013 13:08:01.989236 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:01Z","lastTransitionTime":"2025-10-13T13:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.093439 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.093504 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.093514 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.093536 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.093548 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:02Z","lastTransitionTime":"2025-10-13T13:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.197594 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.197666 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.197684 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.197709 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.197725 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:02Z","lastTransitionTime":"2025-10-13T13:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.300507 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.300542 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.300551 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.300569 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.300581 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:02Z","lastTransitionTime":"2025-10-13T13:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.350649 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:02 crc kubenswrapper[4684]: E1013 13:08:02.350874 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.403272 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.403338 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.403351 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.403374 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.403392 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:02Z","lastTransitionTime":"2025-10-13T13:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.459762 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.459821 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.459837 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.459853 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.459870 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:02Z","lastTransitionTime":"2025-10-13T13:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:02 crc kubenswrapper[4684]: E1013 13:08:02.477251 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:02Z is after 
2025-08-24T17:21:41Z" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.481994 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.482057 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.482081 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.482107 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.482125 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:02Z","lastTransitionTime":"2025-10-13T13:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:02 crc kubenswrapper[4684]: E1013 13:08:02.500109 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:02Z is after 
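Every retry ends in the same x509 error: the network-node-identity webhook at 127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24T17:21:41Z, well before the log's current time of 2025-10-13. A minimal Go sketch of the validity check that fails here, assuming the endpoint from the log is reachable from where the snippet runs (InsecureSkipVerify is set only so the expired leaf can still be inspected; the kubelet itself verifies and fails):

    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
        "time"
    )

    func main() {
        // Address taken verbatim from the failed webhook POST in the log.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
            InsecureSkipVerify: true, // inspect only; real verification rejects the expired cert
        })
        if err != nil {
            log.Fatal(err)
        }
        defer conn.Close()

        leaf := conn.ConnectionState().PeerCertificates[0]
        fmt.Println("NotBefore:", leaf.NotBefore)
        fmt.Println("NotAfter: ", leaf.NotAfter)
        if time.Now().After(leaf.NotAfter) {
            // Same condition the log reports as "certificate has expired or is not yet valid".
            fmt.Println("certificate has expired")
        }
    }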
2025-08-24T17:21:41Z" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.504718 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.504771 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.504787 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.504808 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.504823 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:02Z","lastTransitionTime":"2025-10-13T13:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:02 crc kubenswrapper[4684]: E1013 13:08:02.522824 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:02Z is after 
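Independently of the webhook failure, the Ready condition stays False because the kubelet finds no CNI configuration in /etc/kubernetes/cni/net.d/ (path verbatim from the message above). A hypothetical sketch of that directory scan, assuming the extensions libcni conventionally accepts (.conf, .conflist, .json):

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        const dir = "/etc/kubernetes/cni/net.d" // path taken verbatim from the log message
        entries, err := os.ReadDir(dir)
        if err != nil {
            fmt.Println("cannot read CNI conf dir:", err)
            return
        }
        found := 0
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json": // assumed libcni extensions
                fmt.Println("found CNI config:", e.Name())
                found++
            }
        }
        if found == 0 {
            // Matches the NetworkReady=false / NetworkPluginNotReady condition in the log.
            fmt.Println("no CNI configuration file; network plugin not ready")
        }
    }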
2025-08-24T17:21:41Z" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.527107 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.527164 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.527182 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.527206 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.527224 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:02Z","lastTransitionTime":"2025-10-13T13:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:02 crc kubenswrapper[4684]: E1013 13:08:02.546911 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:02Z is after 
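The retries are bounded: one more attempt fails below, and the kubelet then gives up with "update node status exceeds retry count". A minimal sketch of such a bounded retry loop, assuming five attempts (which matches the five "will retry" errors in this log; the constant name is borrowed from the kubelet but the loop itself is illustrative):

    package main

    import (
        "errors"
        "fmt"
    )

    // Five attempts, matching the five "will retry" errors seen above.
    const nodeStatusUpdateRetry = 5

    func updateNodeStatus(patch func() error) error {
        for i := 0; i < nodeStatusUpdateRetry; i++ {
            if err := patch(); err != nil {
                fmt.Println("Error updating node status, will retry:", err)
                continue
            }
            return nil
        }
        return errors.New("update node status exceeds retry count")
    }

    func main() {
        // Every attempt fails the same way here, as in the log.
        err := updateNodeStatus(func() error {
            return errors.New("failed calling webhook: certificate has expired")
        })
        fmt.Println(err)
    }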
2025-08-24T17:21:41Z" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.551823 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.551862 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.551881 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.551928 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.551947 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:02Z","lastTransitionTime":"2025-10-13T13:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:02 crc kubenswrapper[4684]: E1013 13:08:02.569868 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:02Z is after 
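The condition object printed by setters.go:603 is plain JSON. A small sketch that decodes it into a struct whose fields mirror the keys seen in the log (the struct is illustrative, not the kubelet's own type; the message string is abbreviated):

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // NodeCondition mirrors the keys of the condition= object in the log.
    type NodeCondition struct {
        Type               string `json:"type"`
        Status             string `json:"status"`
        LastHeartbeatTime  string `json:"lastHeartbeatTime"`
        LastTransitionTime string `json:"lastTransitionTime"`
        Reason             string `json:"reason"`
        Message            string `json:"message"`
    }

    func main() {
        raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:02Z","lastTransitionTime":"2025-10-13T13:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready"}`
        var c NodeCondition
        if err := json.Unmarshal([]byte(raw), &c); err != nil {
            panic(err)
        }
        fmt.Printf("%s=%s reason=%s\n", c.Type, c.Status, c.Reason)
    }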
2025-08-24T17:21:41Z" Oct 13 13:08:02 crc kubenswrapper[4684]: E1013 13:08:02.570174 4684 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.572162 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.572227 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.572254 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.572283 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.572305 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:02Z","lastTransitionTime":"2025-10-13T13:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.675264 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.675340 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.675360 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.675386 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.675405 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:02Z","lastTransitionTime":"2025-10-13T13:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.779247 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.779306 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.779324 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.779352 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.779371 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:02Z","lastTransitionTime":"2025-10-13T13:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.882441 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.882489 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.882498 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.882519 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.882530 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:02Z","lastTransitionTime":"2025-10-13T13:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.985279 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.985373 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.985395 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.985420 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:02 crc kubenswrapper[4684]: I1013 13:08:02.985438 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:02Z","lastTransitionTime":"2025-10-13T13:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.088715 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.088760 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.088769 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.088805 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.088816 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:03Z","lastTransitionTime":"2025-10-13T13:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.191628 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.191688 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.191706 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.191729 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.191746 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:03Z","lastTransitionTime":"2025-10-13T13:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.295043 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.295114 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.295139 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.295168 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.295187 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:03Z","lastTransitionTime":"2025-10-13T13:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.350345 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.350468 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:03 crc kubenswrapper[4684]: E1013 13:08:03.350534 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.350606 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:03 crc kubenswrapper[4684]: E1013 13:08:03.350731 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:03 crc kubenswrapper[4684]: E1013 13:08:03.350839 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.398633 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.398822 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.398852 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.398883 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.398937 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:03Z","lastTransitionTime":"2025-10-13T13:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.501600 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.501654 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.501672 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.501697 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.501715 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:03Z","lastTransitionTime":"2025-10-13T13:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.604725 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.604799 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.604816 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.604840 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.604957 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:03Z","lastTransitionTime":"2025-10-13T13:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.683006 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.705019 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:03Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:03 crc 
kubenswrapper[4684]: I1013 13:08:03.707948 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.708014 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.708039 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.708069 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.708092 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:03Z","lastTransitionTime":"2025-10-13T13:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.727537 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:03Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.745268 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:03Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.769999 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\
"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z
\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:03Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.788638 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:03Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.808555 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:03Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.811427 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.811509 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.811535 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.811569 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.811592 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:03Z","lastTransitionTime":"2025-10-13T13:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.830457 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:03Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.861965 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"message\\\":\\\"\\\\nI1013 13:07:58.561979 6106 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 13:07:58.561994 6106 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 13:07:58.562010 6106 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 13:07:58.562019 6106 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 13:07:58.562018 6106 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 13:07:58.562020 6106 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 13:07:58.562032 6106 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 13:07:58.562025 6106 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 13:07:58.562039 6106 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 13:07:58.562037 6106 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 13:07:58.562047 6106 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 13:07:58.562079 6106 factory.go:656] Stopping watch factory\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 13:07:58.562099 6106 ovnkube.go:599] Stopped ovnkube\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 13:07:58.562115 6106 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 13:07:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:03Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.878675 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:03Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.894517 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:03Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.915018 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.915071 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.915087 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.915109 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.915125 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:03Z","lastTransitionTime":"2025-10-13T13:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.928726 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:03Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.951427 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:03Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.972882 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:03Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.993352 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:03Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:03 crc kubenswrapper[4684]: I1013 13:08:03.994120 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs\") pod \"network-metrics-daemon-mlkgd\" (UID: \"9150445c-49fc-46c8-b101-d672f0485cbb\") " pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:03 crc kubenswrapper[4684]: E1013 13:08:03.994287 4684 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 13:08:03 crc kubenswrapper[4684]: E1013 13:08:03.994358 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs podName:9150445c-49fc-46c8-b101-d672f0485cbb nodeName:}" failed. No retries permitted until 2025-10-13 13:08:07.994336253 +0000 UTC m=+42.561720363 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs") pod "network-metrics-daemon-mlkgd" (UID: "9150445c-49fc-46c8-b101-d672f0485cbb") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.011741 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:04Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.017958 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.018023 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.018047 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.018076 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.018099 4684 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:04Z","lastTransitionTime":"2025-10-13T13:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.028045 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:04Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.049556 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:04Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.121203 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.121262 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.121278 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.121305 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.121322 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:04Z","lastTransitionTime":"2025-10-13T13:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.225464 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.225532 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.225549 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.225575 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.225592 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:04Z","lastTransitionTime":"2025-10-13T13:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.329187 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.329470 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.329565 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.329645 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.329726 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:04Z","lastTransitionTime":"2025-10-13T13:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.350013 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:04 crc kubenswrapper[4684]: E1013 13:08:04.350277 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.433115 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.433192 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.433219 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.433248 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.433271 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:04Z","lastTransitionTime":"2025-10-13T13:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.536188 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.536242 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.536260 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.536284 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.536303 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:04Z","lastTransitionTime":"2025-10-13T13:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.639986 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.640038 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.640049 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.640069 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.640081 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:04Z","lastTransitionTime":"2025-10-13T13:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.743754 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.743804 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.743822 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.743846 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.743862 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:04Z","lastTransitionTime":"2025-10-13T13:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.847458 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.847551 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.847586 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.847620 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.847642 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:04Z","lastTransitionTime":"2025-10-13T13:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.950592 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.950665 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.950684 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.950709 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:04 crc kubenswrapper[4684]: I1013 13:08:04.950727 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:04Z","lastTransitionTime":"2025-10-13T13:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.053386 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.053433 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.053446 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.053463 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.053474 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:05Z","lastTransitionTime":"2025-10-13T13:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.156776 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.156843 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.156862 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.156889 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.156939 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:05Z","lastTransitionTime":"2025-10-13T13:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.260432 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.260604 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.260631 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.260663 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.260686 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:05Z","lastTransitionTime":"2025-10-13T13:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.350180 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.350298 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.350368 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:05 crc kubenswrapper[4684]: E1013 13:08:05.350380 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:05 crc kubenswrapper[4684]: E1013 13:08:05.350507 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:05 crc kubenswrapper[4684]: E1013 13:08:05.350630 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.363002 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.363042 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.363056 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.363077 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.363093 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:05Z","lastTransitionTime":"2025-10-13T13:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.465372 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.465457 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.465481 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.465511 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.465536 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:05Z","lastTransitionTime":"2025-10-13T13:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.568227 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.568285 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.568298 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.568314 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.568326 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:05Z","lastTransitionTime":"2025-10-13T13:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.670784 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.670860 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.670881 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.670946 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.670992 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:05Z","lastTransitionTime":"2025-10-13T13:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.774101 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.774154 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.774164 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.774179 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.774191 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:05Z","lastTransitionTime":"2025-10-13T13:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.877720 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.877773 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.877786 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.877803 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.877814 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:05Z","lastTransitionTime":"2025-10-13T13:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.980696 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.980786 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.980806 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.980839 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:05 crc kubenswrapper[4684]: I1013 13:08:05.980862 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:05Z","lastTransitionTime":"2025-10-13T13:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.083841 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.083896 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.083927 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.083950 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.083967 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:06Z","lastTransitionTime":"2025-10-13T13:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.187224 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.187285 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.187298 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.187315 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.187327 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:06Z","lastTransitionTime":"2025-10-13T13:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.290633 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.290714 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.290740 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.290772 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.290795 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:06Z","lastTransitionTime":"2025-10-13T13:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.349873 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:06 crc kubenswrapper[4684]: E1013 13:08:06.350238 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.370973 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.387071 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.394150 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.394201 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.394215 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.394236 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.394251 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:06Z","lastTransitionTime":"2025-10-13T13:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.419349 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"message\\\":\\\"\\\\nI1013 13:07:58.561979 6106 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 13:07:58.561994 6106 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 13:07:58.562010 6106 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 13:07:58.562019 6106 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 13:07:58.562018 6106 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 13:07:58.562020 6106 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 13:07:58.562032 6106 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 13:07:58.562025 6106 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 13:07:58.562039 6106 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 13:07:58.562037 6106 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 13:07:58.562047 6106 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 13:07:58.562079 6106 factory.go:656] Stopping watch factory\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 13:07:58.562099 6106 ovnkube.go:599] Stopped ovnkube\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 13:07:58.562115 6106 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 13:07:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.436299 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.466118 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.483551 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.498059 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.498136 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.498149 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.498169 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.498183 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:06Z","lastTransitionTime":"2025-10-13T13:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.498479 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.512739 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.525522 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.539801 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.554419 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.574652 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.589526 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.604274 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.604311 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.604323 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.604338 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.604350 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:06Z","lastTransitionTime":"2025-10-13T13:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.615454 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.641129 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 
2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.655666 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.669305 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:06Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.707498 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.707535 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.707543 4684 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.707556 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.707565 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:06Z","lastTransitionTime":"2025-10-13T13:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.809476 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.809524 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.809541 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.809564 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.809585 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:06Z","lastTransitionTime":"2025-10-13T13:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.911779 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.912078 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.912160 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.912259 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:06 crc kubenswrapper[4684]: I1013 13:08:06.912340 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:06Z","lastTransitionTime":"2025-10-13T13:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.014487 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.014525 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.014534 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.014547 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.014556 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:07Z","lastTransitionTime":"2025-10-13T13:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.117427 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.117472 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.117484 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.117501 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.117515 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:07Z","lastTransitionTime":"2025-10-13T13:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.221447 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.221532 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.221557 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.221590 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.221618 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:07Z","lastTransitionTime":"2025-10-13T13:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
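
[editor's note] The NotReady heartbeats above all carry the same cause: kubelet's network-readiness check finds no CNI network configuration under /etc/kubernetes/cni/net.d/. A minimal Go sketch of that kind of directory check follows; the path is taken from the log message and the extensions are the ones kubelet accepts for CNI config files, but the logic is illustrative, not kubelet's actual implementation.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// confDir is the directory named in the kubelet message above.
const confDir = "/etc/kubernetes/cni/net.d"

func main() {
	// Collect CNI network configuration files; .conf, .conflist and .json
	// are the extensions kubelet accepts for CNI configs. An empty result
	// corresponds to the "no CNI configuration file" condition logged above.
	var found []string
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(confDir, pat))
		if err != nil {
			fmt.Fprintln(os.Stderr, "bad pattern:", err)
			os.Exit(1)
		}
		found = append(found, matches...)
	}
	if len(found) == 0 {
		fmt.Printf("NetworkReady=false: no CNI configuration file in %s\n", confDir)
		os.Exit(1)
	}
	fmt.Println("CNI configuration present:", found)
}
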
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.325870 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.326002 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.326025 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.326055 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.326079 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:07Z","lastTransitionTime":"2025-10-13T13:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.349778 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.349875 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.349809 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:08:07 crc kubenswrapper[4684]: E1013 13:08:07.350075 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:08:07 crc kubenswrapper[4684]: E1013 13:08:07.350194 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:08:07 crc kubenswrapper[4684]: E1013 13:08:07.350374 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.429657 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.429718 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.429733 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.429755 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.429775 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:07Z","lastTransitionTime":"2025-10-13T13:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.532687 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.532764 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.532782 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.532812 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.532840 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:07Z","lastTransitionTime":"2025-10-13T13:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.635999 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.636075 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.636094 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.636121 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.636141 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:07Z","lastTransitionTime":"2025-10-13T13:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.739170 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.739249 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.739270 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.739295 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.739312 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:07Z","lastTransitionTime":"2025-10-13T13:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.842676 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.842782 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.842802 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.842834 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.842856 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:07Z","lastTransitionTime":"2025-10-13T13:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.945752 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.945874 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.945897 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.945998 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:07 crc kubenswrapper[4684]: I1013 13:08:07.946026 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:07Z","lastTransitionTime":"2025-10-13T13:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.044262 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs\") pod \"network-metrics-daemon-mlkgd\" (UID: \"9150445c-49fc-46c8-b101-d672f0485cbb\") " pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:08 crc kubenswrapper[4684]: E1013 13:08:08.044438 4684 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 13:08:08 crc kubenswrapper[4684]: E1013 13:08:08.044546 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs podName:9150445c-49fc-46c8-b101-d672f0485cbb nodeName:}" failed. No retries permitted until 2025-10-13 13:08:16.044515978 +0000 UTC m=+50.611900078 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs") pod "network-metrics-daemon-mlkgd" (UID: "9150445c-49fc-46c8-b101-d672f0485cbb") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.049984 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.050050 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.050070 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.050101 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.050122 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:08Z","lastTransitionTime":"2025-10-13T13:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.153942 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.154010 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.154028 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.154055 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.154075 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:08Z","lastTransitionTime":"2025-10-13T13:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.257932 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.258007 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.258025 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.258055 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.258073 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:08Z","lastTransitionTime":"2025-10-13T13:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.350544 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:08 crc kubenswrapper[4684]: E1013 13:08:08.350888 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.360192 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.360236 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.360247 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.360262 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.360273 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:08Z","lastTransitionTime":"2025-10-13T13:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.462746 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.462830 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.462883 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.462922 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.462935 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:08Z","lastTransitionTime":"2025-10-13T13:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.605583 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.605981 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.606097 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.606187 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.606266 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:08Z","lastTransitionTime":"2025-10-13T13:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.708966 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.709046 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.709067 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.709089 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.709106 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:08Z","lastTransitionTime":"2025-10-13T13:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.811575 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.812110 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.812207 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.812303 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.812399 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:08Z","lastTransitionTime":"2025-10-13T13:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.914731 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.914793 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.914815 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.914836 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:08 crc kubenswrapper[4684]: I1013 13:08:08.914850 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:08Z","lastTransitionTime":"2025-10-13T13:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.017671 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.017724 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.017740 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.017762 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.017779 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:09Z","lastTransitionTime":"2025-10-13T13:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.120863 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.120969 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.120993 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.121023 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.121044 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:09Z","lastTransitionTime":"2025-10-13T13:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.224685 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.224731 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.224739 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.224754 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.224763 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:09Z","lastTransitionTime":"2025-10-13T13:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.327847 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.327956 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.327979 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.328004 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.328024 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:09Z","lastTransitionTime":"2025-10-13T13:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.350650 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.350664 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.350879 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:09 crc kubenswrapper[4684]: E1013 13:08:09.351064 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:09 crc kubenswrapper[4684]: E1013 13:08:09.351291 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:09 crc kubenswrapper[4684]: E1013 13:08:09.351476 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.432136 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.432186 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.432198 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.432219 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.432242 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:09Z","lastTransitionTime":"2025-10-13T13:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.535625 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.535676 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.535689 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.535707 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.535719 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:09Z","lastTransitionTime":"2025-10-13T13:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.638886 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.639006 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.639020 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.639044 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.639059 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:09Z","lastTransitionTime":"2025-10-13T13:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.741773 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.741828 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.741841 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.741862 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.741877 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:09Z","lastTransitionTime":"2025-10-13T13:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.845130 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.845240 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.845258 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.845284 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.845302 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:09Z","lastTransitionTime":"2025-10-13T13:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.949479 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.949561 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.949580 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.949622 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:09 crc kubenswrapper[4684]: I1013 13:08:09.949643 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:09Z","lastTransitionTime":"2025-10-13T13:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.052994 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.053076 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.053100 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.053133 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.053154 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:10Z","lastTransitionTime":"2025-10-13T13:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.156831 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.156960 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.156987 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.157017 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.157039 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:10Z","lastTransitionTime":"2025-10-13T13:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.259939 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.260053 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.260064 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.260086 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.260100 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:10Z","lastTransitionTime":"2025-10-13T13:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.350565 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:10 crc kubenswrapper[4684]: E1013 13:08:10.350831 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.362518 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.362610 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.362641 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.362680 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.362705 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:10Z","lastTransitionTime":"2025-10-13T13:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.466598 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.466675 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.466698 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.466753 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.466776 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:10Z","lastTransitionTime":"2025-10-13T13:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.570502 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.570582 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.570605 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.570634 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.570654 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:10Z","lastTransitionTime":"2025-10-13T13:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.673161 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.673216 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.673229 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.673251 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.673264 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:10Z","lastTransitionTime":"2025-10-13T13:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.777019 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.777504 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.777592 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.777679 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.777797 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:10Z","lastTransitionTime":"2025-10-13T13:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.880844 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.880934 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.880948 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.880986 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.881004 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:10Z","lastTransitionTime":"2025-10-13T13:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.984324 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.984394 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.984411 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.984438 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:10 crc kubenswrapper[4684]: I1013 13:08:10.984455 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:10Z","lastTransitionTime":"2025-10-13T13:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.088324 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.088408 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.088433 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.088464 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.088483 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:11Z","lastTransitionTime":"2025-10-13T13:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.191997 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.192081 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.192106 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.192132 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.192150 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:11Z","lastTransitionTime":"2025-10-13T13:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.295993 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.296061 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.296085 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.296116 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.296143 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:11Z","lastTransitionTime":"2025-10-13T13:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.350196 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.350192 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:11 crc kubenswrapper[4684]: E1013 13:08:11.350439 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:11 crc kubenswrapper[4684]: E1013 13:08:11.350844 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.350513 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:11 crc kubenswrapper[4684]: E1013 13:08:11.351294 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.398597 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.398664 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.398683 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.398707 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.398725 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:11Z","lastTransitionTime":"2025-10-13T13:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.502656 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.502725 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.502748 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.502784 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.502808 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:11Z","lastTransitionTime":"2025-10-13T13:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.606513 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.606600 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.606625 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.606656 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.606679 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:11Z","lastTransitionTime":"2025-10-13T13:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.711089 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.711155 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.711179 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.711210 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.711232 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:11Z","lastTransitionTime":"2025-10-13T13:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.814338 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.814401 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.814413 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.814433 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.814445 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:11Z","lastTransitionTime":"2025-10-13T13:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.917019 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.917075 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.917088 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.917105 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:11 crc kubenswrapper[4684]: I1013 13:08:11.917117 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:11Z","lastTransitionTime":"2025-10-13T13:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.020141 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.020200 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.020217 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.020239 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.020257 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:12Z","lastTransitionTime":"2025-10-13T13:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.123480 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.123541 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.123553 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.123570 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.123582 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:12Z","lastTransitionTime":"2025-10-13T13:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.227056 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.227118 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.227179 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.227204 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.227258 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:12Z","lastTransitionTime":"2025-10-13T13:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.330062 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.330126 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.330144 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.330172 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.330191 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:12Z","lastTransitionTime":"2025-10-13T13:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.350753 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:12 crc kubenswrapper[4684]: E1013 13:08:12.350875 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.351675 4684 scope.go:117] "RemoveContainer" containerID="6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.433044 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.433077 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.433088 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.433103 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.433113 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:12Z","lastTransitionTime":"2025-10-13T13:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.535989 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.536473 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.536486 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.536504 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.536520 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:12Z","lastTransitionTime":"2025-10-13T13:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.639036 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.639079 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.639090 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.639106 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.639117 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:12Z","lastTransitionTime":"2025-10-13T13:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.692153 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovnkube-controller/1.log" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.701534 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerStarted","Data":"7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706"} Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.702496 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.726703 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.742363 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.742439 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.742455 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.742480 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.742497 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:12Z","lastTransitionTime":"2025-10-13T13:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.752642 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.784074 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.784132 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.784149 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.784168 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.784186 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:12Z","lastTransitionTime":"2025-10-13T13:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.784675 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\
",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"message\\\":\\\"\\\\nI1013 13:07:58.561979 6106 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 13:07:58.561994 6106 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 13:07:58.562010 6106 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 13:07:58.562019 6106 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 13:07:58.562018 6106 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 13:07:58.562020 6106 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 13:07:58.562032 6106 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 13:07:58.562025 6106 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 13:07:58.562039 6106 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 13:07:58.562037 6106 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 13:07:58.562047 6106 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 13:07:58.562079 6106 factory.go:656] Stopping watch factory\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 13:07:58.562099 6106 ovnkube.go:599] Stopped ovnkube\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 13:07:58.562115 6106 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 
13:07:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.799990 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 
13:08:12 crc kubenswrapper[4684]: E1013 13:08:12.807154 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.813504 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.813569 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.813588 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.813617 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.813637 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:12Z","lastTransitionTime":"2025-10-13T13:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.815113 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.829834 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: E1013 13:08:12.835305 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0
f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.840151 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.840193 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.840206 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.840221 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.840232 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:12Z","lastTransitionTime":"2025-10-13T13:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.843503 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-c
ni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: E1013 13:08:12.853252 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.854613 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.857341 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.857378 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.857386 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.857399 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:12 crc kubenswrapper[4684]: 
I1013 13:08:12.857407 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:12Z","lastTransitionTime":"2025-10-13T13:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:12 crc kubenswrapper[4684]: E1013 13:08:12.868953 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.870992 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.872280 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.872310 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.872319 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.872333 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.872344 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:12Z","lastTransitionTime":"2025-10-13T13:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.883950 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: E1013 13:08:12.885671 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z"
Oct 13 13:08:12 crc kubenswrapper[4684]: E1013 13:08:12.885845 4684 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.887257 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.887292 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.887304 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.887320 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.887331 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:12Z","lastTransitionTime":"2025-10-13T13:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.896342 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.908591 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.919327 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.931001 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.947197 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.962075 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.980229 4684 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:12Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.989641 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.989673 4684 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.989681 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.989697 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:12 crc kubenswrapper[4684]: I1013 13:08:12.989706 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:12Z","lastTransitionTime":"2025-10-13T13:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.092290 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.092333 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.092341 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.092357 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.092366 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:13Z","lastTransitionTime":"2025-10-13T13:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.194545 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.194583 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.194593 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.194606 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.194615 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:13Z","lastTransitionTime":"2025-10-13T13:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.297204 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.297262 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.297279 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.297304 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.297324 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:13Z","lastTransitionTime":"2025-10-13T13:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.347153 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.350667 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.350719 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.350806 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:08:13 crc kubenswrapper[4684]: E1013 13:08:13.351052 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:08:13 crc kubenswrapper[4684]: E1013 13:08:13.351175 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:08:13 crc kubenswrapper[4684]: E1013 13:08:13.351311 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.362843 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.373164 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,
\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.393576 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.400521 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.400590 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.400610 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.400635 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.400654 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:13Z","lastTransitionTime":"2025-10-13T13:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.419207 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"message\\\":\\\"\\\\nI1013 13:07:58.561979 6106 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 13:07:58.561994 6106 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 13:07:58.562010 6106 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 13:07:58.562019 6106 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 13:07:58.562018 6106 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 13:07:58.562020 6106 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 13:07:58.562032 6106 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 13:07:58.562025 6106 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 13:07:58.562039 6106 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 13:07:58.562037 6106 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 13:07:58.562047 6106 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 13:07:58.562079 6106 factory.go:656] Stopping watch factory\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 13:07:58.562099 6106 ovnkube.go:599] Stopped ovnkube\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 13:07:58.562115 6106 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 
13:07:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.433371 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 
13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.457684 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.473549 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.488781 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.503729 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.504057 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.504135 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.504157 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.504179 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.504197 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:13Z","lastTransitionTime":"2025-10-13T13:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.515995 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.527569 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.545815 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.558946 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.575605 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.590571 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.601444 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.606036 4684 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.606075 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.606086 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.606105 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.606117 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:13Z","lastTransitionTime":"2025-10-13T13:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.615046 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.625371 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.706047 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovnkube-controller/2.log" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.706568 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovnkube-controller/1.log" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.708112 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.708141 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.708153 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.708171 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.708182 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:13Z","lastTransitionTime":"2025-10-13T13:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file 
in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.710701 4684 generic.go:334] "Generic (PLEG): container finished" podID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerID="7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706" exitCode=1 Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.711187 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerDied","Data":"7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706"} Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.711363 4684 scope.go:117] "RemoveContainer" containerID="6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.712356 4684 scope.go:117] "RemoveContainer" containerID="7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706" Oct 13 13:08:13 crc kubenswrapper[4684]: E1013 13:08:13.712637 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.729570 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"i
mageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.748456 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.762816 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.781848 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\
"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z
\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.796146 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.811394 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.811461 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.811479 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.811505 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.811521 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:13Z","lastTransitionTime":"2025-10-13T13:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.817443 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.830738 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.850775 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16ece342-77d1-4450-ac9a-d94b19143021\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d4b7df401f831751f813a17cc0e75694cf4de341ec37ef97818003ffcf6d598\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f056170937edae1ef2996bc07287e473f8a426f39ab52b9d448d7ef471bce70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665fc86f1a0d4d2c6b1cb02d8c96e6c23b3d77c076af39784b0c4af37355055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.876311 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6584447a10f325cf3e6d0e71cfee04a2c5b0c97764991263bbe0f932b352c2be\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"message\\\":\\\"\\\\nI1013 13:07:58.561979 6106 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 13:07:58.561994 6106 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 13:07:58.562010 6106 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 13:07:58.562019 6106 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 13:07:58.562018 6106 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 13:07:58.562020 6106 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 13:07:58.562032 6106 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 13:07:58.562025 6106 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 13:07:58.562039 6106 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 13:07:58.562037 6106 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 13:07:58.562047 6106 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 13:07:58.562079 6106 factory.go:656] Stopping watch factory\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 13:07:58.562099 6106 ovnkube.go:599] Stopped ovnkube\\\\nI1013 13:07:58.562094 6106 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 13:07:58.562115 6106 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 13:07:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"message\\\":\\\"d680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007756c7b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9001,TargetPort:{0 9001 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: 
machine-config-controller,},ClusterIP:10.217.5.16,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.16],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Conditio\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:08:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\"
:\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.895529 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 
13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.914990 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.915071 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.915096 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.915128 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.915147 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:13Z","lastTransitionTime":"2025-10-13T13:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.915977 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.937652 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\
"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.959441 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:13 crc kubenswrapper[4684]: I1013 13:08:13.998131 4684 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert
-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\
\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:13Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.013360 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.019843 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.019941 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.019989 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.020027 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.020054 4684 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:14Z","lastTransitionTime":"2025-10-13T13:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.034785 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.055101 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.070495 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.123789 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.123855 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.123873 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.123945 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.123987 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:14Z","lastTransitionTime":"2025-10-13T13:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.227465 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.227535 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.227554 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.227588 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.227627 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:14Z","lastTransitionTime":"2025-10-13T13:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.331209 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.331271 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.331289 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.331334 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.331352 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:14Z","lastTransitionTime":"2025-10-13T13:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.350800 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:14 crc kubenswrapper[4684]: E1013 13:08:14.351403 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.434179 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.434242 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.434260 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.434286 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.434303 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:14Z","lastTransitionTime":"2025-10-13T13:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.538030 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.538089 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.538108 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.538133 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.538150 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:14Z","lastTransitionTime":"2025-10-13T13:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.640986 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.641029 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.641041 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.641056 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.641067 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:14Z","lastTransitionTime":"2025-10-13T13:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.715603 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovnkube-controller/2.log" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.719860 4684 scope.go:117] "RemoveContainer" containerID="7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706" Oct 13 13:08:14 crc kubenswrapper[4684]: E1013 13:08:14.720148 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.732446 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.743080 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.743115 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.743124 4684 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.743138 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.743148 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:14Z","lastTransitionTime":"2025-10-13T13:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.749569 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.764410 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.781047 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\
"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z
\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.794402 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.816180 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.831855 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.845389 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.845426 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.845437 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.845454 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.845467 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:14Z","lastTransitionTime":"2025-10-13T13:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.846512 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16ece342-77d1-4450-ac9a-d94b19143021\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d4b7df401f831751f813a17cc0e75694cf4de341ec37ef97818003ffcf6d598\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f056170937edae1ef2996bc07287e473f8a426f39ab52b9d448d7ef471bce70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665fc86f1a0d4d2c6b1cb02d8c96e6c23b3d77c076af39784b0c4af37355055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.868345 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2b
c66bb8c69e0e9c28c6dd6706\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"message\\\":\\\"d680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007756c7b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9001,TargetPort:{0 9001 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: machine-config-controller,},ClusterIP:10.217.5.16,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.16],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Conditio\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:08:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.882860 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.899224 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.930501 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.948636 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.948706 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.948723 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.948747 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.948765 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:14Z","lastTransitionTime":"2025-10-13T13:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.949992 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.970859 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:14 crc kubenswrapper[4684]: I1013 13:08:14.987346 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:14Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.007018 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:15Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.018568 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:15Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.038492 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:15Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.051310 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.051419 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.051441 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.051466 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.051483 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:15Z","lastTransitionTime":"2025-10-13T13:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.155217 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.155287 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.155329 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.155363 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.155387 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:15Z","lastTransitionTime":"2025-10-13T13:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.257780 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.257828 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.257841 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.257860 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.257873 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:15Z","lastTransitionTime":"2025-10-13T13:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.350942 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:15 crc kubenswrapper[4684]: E1013 13:08:15.351103 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.351188 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.351315 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:15 crc kubenswrapper[4684]: E1013 13:08:15.351362 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:15 crc kubenswrapper[4684]: E1013 13:08:15.351480 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.360837 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.361306 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.361583 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.361768 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.361983 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:15Z","lastTransitionTime":"2025-10-13T13:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.464465 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.464538 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.464563 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.464594 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.464617 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:15Z","lastTransitionTime":"2025-10-13T13:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.567357 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.567419 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.567487 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.567512 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.567580 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:15Z","lastTransitionTime":"2025-10-13T13:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.671066 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.671127 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.671145 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.671169 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.671186 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:15Z","lastTransitionTime":"2025-10-13T13:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.773670 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.774198 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.774393 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.774581 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.774828 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:15Z","lastTransitionTime":"2025-10-13T13:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.878400 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.878464 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.878477 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.878496 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.878509 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:15Z","lastTransitionTime":"2025-10-13T13:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.982040 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.982103 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.982121 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.982147 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:15 crc kubenswrapper[4684]: I1013 13:08:15.982164 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:15Z","lastTransitionTime":"2025-10-13T13:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.086541 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.086630 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.086655 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.086688 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.086710 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:16Z","lastTransitionTime":"2025-10-13T13:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.134406 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs\") pod \"network-metrics-daemon-mlkgd\" (UID: \"9150445c-49fc-46c8-b101-d672f0485cbb\") " pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:16 crc kubenswrapper[4684]: E1013 13:08:16.134611 4684 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 13:08:16 crc kubenswrapper[4684]: E1013 13:08:16.135144 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs podName:9150445c-49fc-46c8-b101-d672f0485cbb nodeName:}" failed. No retries permitted until 2025-10-13 13:08:32.135104058 +0000 UTC m=+66.702488168 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs") pod "network-metrics-daemon-mlkgd" (UID: "9150445c-49fc-46c8-b101-d672f0485cbb") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.189521 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.189605 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.189627 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.189661 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.189683 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:16Z","lastTransitionTime":"2025-10-13T13:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.293279 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.293716 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.293884 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.294069 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.294201 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:16Z","lastTransitionTime":"2025-10-13T13:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.350621 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:16 crc kubenswrapper[4684]: E1013 13:08:16.351198 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.367767 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.382962 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.397545 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.397604 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.397619 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.397638 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.397654 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:16Z","lastTransitionTime":"2025-10-13T13:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.401875 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16ece342-77d1-4450-ac9a-d94b19143021\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d4b7df401f831751f813a17cc0e75694cf4de341ec37ef97818003ffcf6d598\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f056170937edae1ef2996bc07287e473f8a426f39ab52b9d448d7ef471bce70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665fc86f1a0d4d2c6b1cb02d8c96e6c23b3d77c076af39784b0c4af37355055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.430656 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2b
c66bb8c69e0e9c28c6dd6706\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"message\\\":\\\"d680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007756c7b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9001,TargetPort:{0 9001 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: machine-config-controller,},ClusterIP:10.217.5.16,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.16],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Conditio\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:08:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.448324 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.459539 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.470419 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.484058 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.493964 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.499804 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.499843 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.499852 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:16 crc 
kubenswrapper[4684]: I1013 13:08:16.499868 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.499877 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:16Z","lastTransitionTime":"2025-10-13T13:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.525152 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containe
rID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc151875
4c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.539642 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
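Every status patch in this stretch of the log fails the same way: the API server cannot call the pod.network-node-identity.openshift.io admitting webhook at https://127.0.0.1:9743 because the webhook's serving certificate expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-10-13. The following is a minimal diagnostic sketch, added for illustration and not part of kubelet; the address is taken verbatim from the failing webhook URL, and the check simply repeats the comparison kubelet reports.

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Address taken from the failing webhook URL in the log.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // read the certificate without trusting it
	})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	now := time.Now().UTC()
	fmt.Printf("NotBefore=%s NotAfter=%s now=%s\n", cert.NotBefore, cert.NotAfter, now)
	if now.After(cert.NotAfter) {
		// The condition kubelet keeps logging:
		// "current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z"
		fmt.Println("webhook serving certificate has expired")
	}
}

Until that certificate is rotated, every pod status patch on the node will keep failing with the same x509 error, which is why the identical message recurs for each pod below.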
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.554382 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.570662 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.585090 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.601749 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.601797 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.601806 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.601835 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.601844 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:16Z","lastTransitionTime":"2025-10-13T13:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.605407 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.619012 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.630292 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.643654 4684 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:16Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.703945 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.704000 4684 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.704010 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.704027 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.704038 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:16Z","lastTransitionTime":"2025-10-13T13:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.806401 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.806500 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.806515 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.806535 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.806546 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:16Z","lastTransitionTime":"2025-10-13T13:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.909431 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.909479 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.909493 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.909509 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:16 crc kubenswrapper[4684]: I1013 13:08:16.909519 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:16Z","lastTransitionTime":"2025-10-13T13:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.012752 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.012806 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.012818 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.012837 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.012849 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:17Z","lastTransitionTime":"2025-10-13T13:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.115808 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.115869 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.115886 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.115938 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.115956 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:17Z","lastTransitionTime":"2025-10-13T13:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.219573 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.219614 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.219624 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.219639 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.219653 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:17Z","lastTransitionTime":"2025-10-13T13:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.323089 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.323153 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.323179 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.323208 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.323229 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:17Z","lastTransitionTime":"2025-10-13T13:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.349635 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.349719 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.349684 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:17 crc kubenswrapper[4684]: E1013 13:08:17.349875 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:17 crc kubenswrapper[4684]: E1013 13:08:17.350042 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:17 crc kubenswrapper[4684]: E1013 13:08:17.350179 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
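Alongside the webhook failures, the node is flapping NotReady for a second, independent reason: the runtime reports no CNI configuration in /etc/kubernetes/cni/net.d/, so new pod sandboxes (for networking-console-plugin-85b44fc459-gdk6g, network-check-source-55646444c4-trplf, and network-check-target-xd92c above) cannot be created. A hedged sketch of the same directory check follows, as an editorial illustration; the path is verbatim from the log, while the .conf/.conflist filter is an assumption about what counts as a network definition.

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // path taken verbatim from the log message
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	var confs []string
	for _, e := range entries {
		name := e.Name()
		if strings.HasSuffix(name, ".conf") || strings.HasSuffix(name, ".conflist") {
			confs = append(confs, filepath.Join(dir, name))
		}
	}
	if len(confs) == 0 {
		// Matches "NetworkReady=false ... no CNI configuration file"
		fmt.Println("no CNI configuration file found; network plugin not ready")
		return
	}
	fmt.Println("CNI configs:", confs)
}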
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.426243 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.426292 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.426303 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.426321 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.426332 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:17Z","lastTransitionTime":"2025-10-13T13:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.529230 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.529294 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.529312 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.529339 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.529362 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:17Z","lastTransitionTime":"2025-10-13T13:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.632455 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.632502 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.632514 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.632530 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.632542 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:17Z","lastTransitionTime":"2025-10-13T13:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.651199 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 13:08:17 crc kubenswrapper[4684]: E1013 13:08:17.651377 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:08:49.651342432 +0000 UTC m=+84.218726542 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.651461 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:08:17 crc kubenswrapper[4684]: E1013 13:08:17.651633 4684 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 13 13:08:17 crc kubenswrapper[4684]: E1013 13:08:17.651730 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 13:08:49.651706192 +0000 UTC m=+84.219090302 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.752766 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.752885 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:08:17 crc kubenswrapper[4684]: I1013 13:08:17.752982 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:08:17 crc kubenswrapper[4684]: E1013 13:08:17.752992 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 13 13:08:17 crc kubenswrapper[4684]: E1013 13:08:17.753021 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 13 13:08:17 crc kubenswrapper[4684]: E1013 13:08:17.753067 4684 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 13 13:08:17 crc kubenswrapper[4684]: E1013 13:08:17.753109 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 13 13:08:17 crc kubenswrapper[4684]: E1013 13:08:17.753122 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 13 13:08:17 crc kubenswrapper[4684]: E1013 13:08:17.753130 4684 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 13 13:08:17 crc kubenswrapper[4684]: E1013 13:08:17.753140 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 13:08:49.753119409 +0000 UTC m=+84.320503489 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 13 13:08:17 crc kubenswrapper[4684]: E1013 13:08:17.753163 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-13 13:08:49.75315166 +0000 UTC m=+84.320535830 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 13 13:08:17 crc kubenswrapper[4684]: E1013 13:08:17.753191 4684 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 13 13:08:17 crc kubenswrapper[4684]: E1013 13:08:17.753216 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-13 13:08:49.753209672 +0000 UTC m=+84.320593742 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 13 13:08:18 crc kubenswrapper[4684]: I1013 13:08:18.045925 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:18 crc kubenswrapper[4684]: I1013 13:08:18.045987 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:18 crc kubenswrapper[4684]: I1013 13:08:18.046001 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:18 crc kubenswrapper[4684]: I1013 13:08:18.046027 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:18 crc kubenswrapper[4684]: I1013 13:08:18.046044 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:18Z","lastTransitionTime":"2025-10-13T13:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:18 crc kubenswrapper[4684]: I1013 13:08:18.350523 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:08:18 crc kubenswrapper[4684]: E1013 13:08:18.350826 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:08:19 crc kubenswrapper[4684]: I1013 13:08:19.350779 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:08:19 crc kubenswrapper[4684]: I1013 13:08:19.350869 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:08:19 crc kubenswrapper[4684]: I1013 13:08:19.350971 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:08:19 crc kubenswrapper[4684]: E1013 13:08:19.351102 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:08:19 crc kubenswrapper[4684]: E1013 13:08:19.351282 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:08:19 crc kubenswrapper[4684]: E1013 13:08:19.351402 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:08:19 crc kubenswrapper[4684]: I1013 13:08:19.390269 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:19 crc kubenswrapper[4684]: I1013 13:08:19.390312 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:19 crc kubenswrapper[4684]: I1013 13:08:19.390324 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:19 crc kubenswrapper[4684]: I1013 13:08:19.390340 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:19 crc kubenswrapper[4684]: I1013 13:08:19.390349 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:19Z","lastTransitionTime":"2025-10-13T13:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:20 crc kubenswrapper[4684]: I1013 13:08:20.010364 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:20 crc kubenswrapper[4684]: I1013 13:08:20.010437 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:20 crc kubenswrapper[4684]: I1013 13:08:20.010458 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:20 crc kubenswrapper[4684]: I1013 13:08:20.010485 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:20 crc kubenswrapper[4684]: I1013 13:08:20.010504 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:20Z","lastTransitionTime":"2025-10-13T13:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:20 crc kubenswrapper[4684]: I1013 13:08:20.349894 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:08:20 crc kubenswrapper[4684]: E1013 13:08:20.350049 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:08:21 crc kubenswrapper[4684]: I1013 13:08:21.047155 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:21 crc kubenswrapper[4684]: I1013 13:08:21.047239 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:21 crc kubenswrapper[4684]: I1013 13:08:21.047252 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:21 crc kubenswrapper[4684]: I1013 13:08:21.047275 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:21 crc kubenswrapper[4684]: I1013 13:08:21.047287 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:21Z","lastTransitionTime":"2025-10-13T13:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:21 crc kubenswrapper[4684]: I1013 13:08:21.350423 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:08:21 crc kubenswrapper[4684]: I1013 13:08:21.350597 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:08:21 crc kubenswrapper[4684]: I1013 13:08:21.350604 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:08:21 crc kubenswrapper[4684]: E1013 13:08:21.350727 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:08:21 crc kubenswrapper[4684]: E1013 13:08:21.350846 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:08:21 crc kubenswrapper[4684]: E1013 13:08:21.350970 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Has your network provider started?"} Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.183049 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.183124 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.183148 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.183179 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.183202 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:22Z","lastTransitionTime":"2025-10-13T13:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.287006 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.287084 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.287109 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.287139 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.287161 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:22Z","lastTransitionTime":"2025-10-13T13:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.350028 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:22 crc kubenswrapper[4684]: E1013 13:08:22.350193 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.390450 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.390524 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.390543 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.390566 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.390585 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:22Z","lastTransitionTime":"2025-10-13T13:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.494272 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.494347 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.494371 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.494398 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.494419 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:22Z","lastTransitionTime":"2025-10-13T13:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.598068 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.598116 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.598131 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.598152 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.598165 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:22Z","lastTransitionTime":"2025-10-13T13:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.702004 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.702080 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.702099 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.702126 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.702148 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:22Z","lastTransitionTime":"2025-10-13T13:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.806019 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.806093 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.806112 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.806139 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.806158 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:22Z","lastTransitionTime":"2025-10-13T13:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.909593 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.909665 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.909680 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.909704 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:22 crc kubenswrapper[4684]: I1013 13:08:22.909721 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:22Z","lastTransitionTime":"2025-10-13T13:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.013334 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.013434 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.013455 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.013493 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.013531 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:23Z","lastTransitionTime":"2025-10-13T13:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.116717 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.116790 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.116813 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.116842 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.116870 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:23Z","lastTransitionTime":"2025-10-13T13:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.148819 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.148871 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.148888 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.148943 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.148961 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:23Z","lastTransitionTime":"2025-10-13T13:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:23 crc kubenswrapper[4684]: E1013 13:08:23.166249 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:23Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.176559 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.176618 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.176626 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.176647 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.176659 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:23Z","lastTransitionTime":"2025-10-13T13:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:23 crc kubenswrapper[4684]: E1013 13:08:23.194892 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:23Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.200492 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.200591 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.200612 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.200641 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.200662 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:23Z","lastTransitionTime":"2025-10-13T13:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:23 crc kubenswrapper[4684]: E1013 13:08:23.220230 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:23Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.226643 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.226714 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.226724 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.226745 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.226760 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:23Z","lastTransitionTime":"2025-10-13T13:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:23 crc kubenswrapper[4684]: E1013 13:08:23.244419 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:23Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.249626 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.249678 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.249696 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.249723 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.249742 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:23Z","lastTransitionTime":"2025-10-13T13:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:23 crc kubenswrapper[4684]: E1013 13:08:23.271858 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:23Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:23 crc kubenswrapper[4684]: E1013 13:08:23.272142 4684 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.274287 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.274333 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.274349 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.274372 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.274390 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:23Z","lastTransitionTime":"2025-10-13T13:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.350398 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.350399 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.350419 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:23 crc kubenswrapper[4684]: E1013 13:08:23.350654 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:23 crc kubenswrapper[4684]: E1013 13:08:23.350781 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:23 crc kubenswrapper[4684]: E1013 13:08:23.350856 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.376989 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.377064 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.377088 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.377117 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.377135 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:23Z","lastTransitionTime":"2025-10-13T13:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.481080 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.481180 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.481199 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.481257 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.481278 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:23Z","lastTransitionTime":"2025-10-13T13:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.584558 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.584638 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.584663 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.584689 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.584707 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:23Z","lastTransitionTime":"2025-10-13T13:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.688335 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.688421 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.688448 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.688478 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.688501 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:23Z","lastTransitionTime":"2025-10-13T13:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.791447 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.791557 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.791578 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.791610 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.791644 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:23Z","lastTransitionTime":"2025-10-13T13:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.895277 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.895366 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.895388 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.895419 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.895444 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:23Z","lastTransitionTime":"2025-10-13T13:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.998771 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.998859 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.998887 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.998959 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:23 crc kubenswrapper[4684]: I1013 13:08:23.998990 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:23Z","lastTransitionTime":"2025-10-13T13:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.102894 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.103010 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.103029 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.103062 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.103081 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:24Z","lastTransitionTime":"2025-10-13T13:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.207260 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.207343 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.207368 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.207401 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.207426 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:24Z","lastTransitionTime":"2025-10-13T13:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.310632 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.310696 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.310711 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.310734 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.310746 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:24Z","lastTransitionTime":"2025-10-13T13:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.349859 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:24 crc kubenswrapper[4684]: E1013 13:08:24.350141 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.414434 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.414507 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.414527 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.414552 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.414571 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:24Z","lastTransitionTime":"2025-10-13T13:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.518500 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.518579 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.518595 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.518622 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.518639 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:24Z","lastTransitionTime":"2025-10-13T13:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.621773 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.621826 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.621835 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.621853 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.621865 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:24Z","lastTransitionTime":"2025-10-13T13:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.724897 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.724974 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.724985 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.725010 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.725026 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:24Z","lastTransitionTime":"2025-10-13T13:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.828351 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.828421 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.828434 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.828458 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.828472 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:24Z","lastTransitionTime":"2025-10-13T13:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.931748 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.931797 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.931809 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.931828 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:24 crc kubenswrapper[4684]: I1013 13:08:24.931842 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:24Z","lastTransitionTime":"2025-10-13T13:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.035096 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.035154 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.035165 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.035186 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.035199 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:25Z","lastTransitionTime":"2025-10-13T13:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.138354 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.138427 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.138437 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.138467 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.138483 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:25Z","lastTransitionTime":"2025-10-13T13:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.242300 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.242392 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.242416 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.242450 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.242472 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:25Z","lastTransitionTime":"2025-10-13T13:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.345538 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.345594 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.345607 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.345629 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.345644 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:25Z","lastTransitionTime":"2025-10-13T13:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.350215 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.350339 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:25 crc kubenswrapper[4684]: E1013 13:08:25.350385 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.350221 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:25 crc kubenswrapper[4684]: E1013 13:08:25.350545 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:25 crc kubenswrapper[4684]: E1013 13:08:25.350855 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.449348 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.449423 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.449465 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.449503 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.449532 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:25Z","lastTransitionTime":"2025-10-13T13:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.552964 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.553069 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.553090 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.553154 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.553176 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:25Z","lastTransitionTime":"2025-10-13T13:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.657261 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.657355 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.657377 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.657424 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.657436 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:25Z","lastTransitionTime":"2025-10-13T13:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.761117 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.761198 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.761217 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.761246 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.761267 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:25Z","lastTransitionTime":"2025-10-13T13:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.864627 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.864677 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.864689 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.864711 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.864725 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:25Z","lastTransitionTime":"2025-10-13T13:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.968356 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.968447 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.968472 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.968507 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:25 crc kubenswrapper[4684]: I1013 13:08:25.968534 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:25Z","lastTransitionTime":"2025-10-13T13:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.072012 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.072128 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.072149 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.072178 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.072198 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:26Z","lastTransitionTime":"2025-10-13T13:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.176996 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.177086 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.177112 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.177145 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.177171 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:26Z","lastTransitionTime":"2025-10-13T13:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.281001 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.282078 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.282144 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.282174 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.282188 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:26Z","lastTransitionTime":"2025-10-13T13:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.351934 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:26 crc kubenswrapper[4684]: E1013 13:08:26.353446 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.377585 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.385261 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.385331 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.385348 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.385788 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.385844 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:26Z","lastTransitionTime":"2025-10-13T13:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.400147 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.416702 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16ece342-77d1-4450-ac9a-d94b19143021\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d4b7df401f831751f813a17cc0e75694cf4de341ec37ef97818003ffcf6d598\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f056170937edae1ef2996bc07287e473f8a426f39ab52b9d448d7ef471bce70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665fc86f1a0d4d2c6b1cb02d8c96e6c23b3d77c076af39784b0c4af37355055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.443719 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2b
c66bb8c69e0e9c28c6dd6706\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"message\\\":\\\"d680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007756c7b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9001,TargetPort:{0 9001 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: machine-config-controller,},ClusterIP:10.217.5.16,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.16],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Conditio\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:08:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.462791 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.483619 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.488795 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.488870 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.488898 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.488974 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.488997 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:26Z","lastTransitionTime":"2025-10-13T13:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.500913 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.516376 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.534147 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.546117 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.569836 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.591329 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.591386 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.591405 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.591429 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.591446 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:26Z","lastTransitionTime":"2025-10-13T13:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.599939 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.619493 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.633294 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.643678 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.653354 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.665475 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.678351 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:26Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.693838 4684 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.693916 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.693932 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.693949 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.693960 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:26Z","lastTransitionTime":"2025-10-13T13:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.799924 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.800018 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.800037 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.800065 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.800083 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:26Z","lastTransitionTime":"2025-10-13T13:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.902195 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.902236 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.902246 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.902263 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:26 crc kubenswrapper[4684]: I1013 13:08:26.902273 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:26Z","lastTransitionTime":"2025-10-13T13:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.004882 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.004966 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.004982 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.005007 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.005023 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:27Z","lastTransitionTime":"2025-10-13T13:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.108083 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.108146 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.108160 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.108179 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.108192 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:27Z","lastTransitionTime":"2025-10-13T13:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.210555 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.210595 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.210607 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.210625 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.210637 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:27Z","lastTransitionTime":"2025-10-13T13:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.314211 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.314281 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.314319 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.314353 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.314375 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:27Z","lastTransitionTime":"2025-10-13T13:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.349968 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.350013 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.350116 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:27 crc kubenswrapper[4684]: E1013 13:08:27.350153 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:27 crc kubenswrapper[4684]: E1013 13:08:27.350292 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:27 crc kubenswrapper[4684]: E1013 13:08:27.350404 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.417173 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.417252 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.417277 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.417305 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.417325 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:27Z","lastTransitionTime":"2025-10-13T13:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.520291 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.520350 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.520369 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.520392 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.520404 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:27Z","lastTransitionTime":"2025-10-13T13:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.623891 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.624003 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.624022 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.624058 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.624076 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:27Z","lastTransitionTime":"2025-10-13T13:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.727555 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.727608 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.727624 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.727644 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.727660 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:27Z","lastTransitionTime":"2025-10-13T13:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.830719 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.830807 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.830836 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.830863 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.830881 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:27Z","lastTransitionTime":"2025-10-13T13:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.933136 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.933183 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.933192 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.933208 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:27 crc kubenswrapper[4684]: I1013 13:08:27.933218 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:27Z","lastTransitionTime":"2025-10-13T13:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.032832 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.033851 4684 scope.go:117] "RemoveContainer" containerID="7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706" Oct 13 13:08:28 crc kubenswrapper[4684]: E1013 13:08:28.034083 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.036986 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.037033 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.037045 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.037068 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.037082 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:28Z","lastTransitionTime":"2025-10-13T13:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.140746 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.140810 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.140830 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.140857 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.140876 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:28Z","lastTransitionTime":"2025-10-13T13:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.244453 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.244513 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.244530 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.244556 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.244575 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:28Z","lastTransitionTime":"2025-10-13T13:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.347174 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.347237 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.347257 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.347282 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.347305 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:28Z","lastTransitionTime":"2025-10-13T13:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.350770 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:28 crc kubenswrapper[4684]: E1013 13:08:28.351014 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.450329 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.450405 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.450431 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.450460 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.450500 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:28Z","lastTransitionTime":"2025-10-13T13:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.554178 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.554240 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.554257 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.554279 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.554295 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:28Z","lastTransitionTime":"2025-10-13T13:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.657146 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.657213 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.657241 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.657273 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.657295 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:28Z","lastTransitionTime":"2025-10-13T13:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.760225 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.760295 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.760312 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.760374 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.760393 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:28Z","lastTransitionTime":"2025-10-13T13:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.864011 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.864098 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.864117 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.864142 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.864158 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:28Z","lastTransitionTime":"2025-10-13T13:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.966502 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.966543 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.966555 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.966571 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:28 crc kubenswrapper[4684]: I1013 13:08:28.966582 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:28Z","lastTransitionTime":"2025-10-13T13:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.068980 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.069023 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.069035 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.069058 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.069071 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:29Z","lastTransitionTime":"2025-10-13T13:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.171533 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.171593 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.171608 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.171628 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.171643 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:29Z","lastTransitionTime":"2025-10-13T13:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.279411 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.279510 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.279534 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.279570 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.279610 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:29Z","lastTransitionTime":"2025-10-13T13:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.350243 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.350349 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:29 crc kubenswrapper[4684]: E1013 13:08:29.350364 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:29 crc kubenswrapper[4684]: E1013 13:08:29.350566 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.351096 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:29 crc kubenswrapper[4684]: E1013 13:08:29.351290 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.383083 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.383114 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.383123 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.383136 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.383145 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:29Z","lastTransitionTime":"2025-10-13T13:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.486593 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.486646 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.486663 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.486689 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.486708 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:29Z","lastTransitionTime":"2025-10-13T13:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.589350 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.589400 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.589412 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.589430 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.589445 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:29Z","lastTransitionTime":"2025-10-13T13:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.692011 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.692059 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.692071 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.692087 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.692099 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:29Z","lastTransitionTime":"2025-10-13T13:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.795840 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.795961 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.795992 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.796022 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.796044 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:29Z","lastTransitionTime":"2025-10-13T13:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.898983 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.899035 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.899056 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.899088 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:29 crc kubenswrapper[4684]: I1013 13:08:29.899109 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:29Z","lastTransitionTime":"2025-10-13T13:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.001974 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.002047 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.002067 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.002092 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.002112 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:30Z","lastTransitionTime":"2025-10-13T13:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.104921 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.104948 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.104957 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.104970 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.104978 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:30Z","lastTransitionTime":"2025-10-13T13:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.207484 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.207565 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.207583 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.207616 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.207637 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:30Z","lastTransitionTime":"2025-10-13T13:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.310493 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.310544 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.310554 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.310568 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.310578 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:30Z","lastTransitionTime":"2025-10-13T13:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.350534 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:30 crc kubenswrapper[4684]: E1013 13:08:30.350660 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.413439 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.413497 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.413516 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.413539 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.413558 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:30Z","lastTransitionTime":"2025-10-13T13:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.517324 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.517392 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.517410 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.517439 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.517456 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:30Z","lastTransitionTime":"2025-10-13T13:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.620245 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.620284 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.620298 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.620340 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.620352 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:30Z","lastTransitionTime":"2025-10-13T13:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.723922 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.723971 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.724016 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.724041 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.724060 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:30Z","lastTransitionTime":"2025-10-13T13:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.827385 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.827443 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.827459 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.827480 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.827497 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:30Z","lastTransitionTime":"2025-10-13T13:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.930145 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.930201 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.930215 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.930232 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:30 crc kubenswrapper[4684]: I1013 13:08:30.930243 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:30Z","lastTransitionTime":"2025-10-13T13:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.033145 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.033200 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.033211 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.033231 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.033244 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:31Z","lastTransitionTime":"2025-10-13T13:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.135814 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.135876 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.135886 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.135932 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.135952 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:31Z","lastTransitionTime":"2025-10-13T13:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.237859 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.237927 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.237939 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.237956 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.237973 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:31Z","lastTransitionTime":"2025-10-13T13:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.340651 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.340698 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.340714 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.340730 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.340740 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:31Z","lastTransitionTime":"2025-10-13T13:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.349972 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.350022 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:31 crc kubenswrapper[4684]: E1013 13:08:31.350101 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.350196 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:31 crc kubenswrapper[4684]: E1013 13:08:31.350314 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:31 crc kubenswrapper[4684]: E1013 13:08:31.350375 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.443717 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.443763 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.443771 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.443784 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.443795 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:31Z","lastTransitionTime":"2025-10-13T13:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.545955 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.546003 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.546013 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.546032 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.546043 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:31Z","lastTransitionTime":"2025-10-13T13:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.649789 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.649865 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.649885 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.649936 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.649957 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:31Z","lastTransitionTime":"2025-10-13T13:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.753883 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.753959 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.753971 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.753989 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.754001 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:31Z","lastTransitionTime":"2025-10-13T13:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.857567 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.857628 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.857644 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.857670 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.857687 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:31Z","lastTransitionTime":"2025-10-13T13:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.960525 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.960620 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.960640 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.960671 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:31 crc kubenswrapper[4684]: I1013 13:08:31.960696 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:31Z","lastTransitionTime":"2025-10-13T13:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.063628 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.063678 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.063692 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.063709 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.063722 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:32Z","lastTransitionTime":"2025-10-13T13:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.166822 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.166876 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.166889 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.166931 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.166943 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:32Z","lastTransitionTime":"2025-10-13T13:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.224745 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs\") pod \"network-metrics-daemon-mlkgd\" (UID: \"9150445c-49fc-46c8-b101-d672f0485cbb\") " pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:32 crc kubenswrapper[4684]: E1013 13:08:32.224956 4684 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 13:08:32 crc kubenswrapper[4684]: E1013 13:08:32.225024 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs podName:9150445c-49fc-46c8-b101-d672f0485cbb nodeName:}" failed. No retries permitted until 2025-10-13 13:09:04.225005914 +0000 UTC m=+98.792389994 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs") pod "network-metrics-daemon-mlkgd" (UID: "9150445c-49fc-46c8-b101-d672f0485cbb") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.269367 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.269410 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.269418 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.269431 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.269440 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:32Z","lastTransitionTime":"2025-10-13T13:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.350092 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:32 crc kubenswrapper[4684]: E1013 13:08:32.350312 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.371983 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.372047 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.372058 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.372093 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.372111 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:32Z","lastTransitionTime":"2025-10-13T13:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.474542 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.474800 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.474892 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.475092 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.475211 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:32Z","lastTransitionTime":"2025-10-13T13:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.579751 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.579811 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.579823 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.579843 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.579854 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:32Z","lastTransitionTime":"2025-10-13T13:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.682791 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.682831 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.682841 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.682857 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.682868 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:32Z","lastTransitionTime":"2025-10-13T13:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.785980 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.786046 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.786065 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.786093 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.786112 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:32Z","lastTransitionTime":"2025-10-13T13:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.889447 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.889512 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.889526 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.889550 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.889565 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:32Z","lastTransitionTime":"2025-10-13T13:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.992587 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.992642 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.992660 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.992684 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:32 crc kubenswrapper[4684]: I1013 13:08:32.992701 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:32Z","lastTransitionTime":"2025-10-13T13:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.095427 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.095471 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.095480 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.095500 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.095512 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:33Z","lastTransitionTime":"2025-10-13T13:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.199945 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.199973 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.199980 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.199994 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.200003 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:33Z","lastTransitionTime":"2025-10-13T13:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.303425 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.303495 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.303513 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.303540 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.303559 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:33Z","lastTransitionTime":"2025-10-13T13:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.350395 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.350478 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:33 crc kubenswrapper[4684]: E1013 13:08:33.350541 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.350638 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:33 crc kubenswrapper[4684]: E1013 13:08:33.350810 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:33 crc kubenswrapper[4684]: E1013 13:08:33.350869 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.407442 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.407498 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.407514 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.407537 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.407550 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:33Z","lastTransitionTime":"2025-10-13T13:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.465724 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.465780 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.465792 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.465815 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.465829 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:33Z","lastTransitionTime":"2025-10-13T13:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:33 crc kubenswrapper[4684]: E1013 13:08:33.481518 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:33Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.486538 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.486587 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.486595 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.486615 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.486628 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:33Z","lastTransitionTime":"2025-10-13T13:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:33 crc kubenswrapper[4684]: E1013 13:08:33.506238 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:33Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.511820 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.511862 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.511927 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.511979 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.512016 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:33Z","lastTransitionTime":"2025-10-13T13:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:33 crc kubenswrapper[4684]: E1013 13:08:33.529192 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:33Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.534237 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.534286 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.534298 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.534315 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.534327 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:33Z","lastTransitionTime":"2025-10-13T13:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:33 crc kubenswrapper[4684]: E1013 13:08:33.552345 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:33Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.557522 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.557572 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.557590 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.557613 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.557635 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:33Z","lastTransitionTime":"2025-10-13T13:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:33 crc kubenswrapper[4684]: E1013 13:08:33.576390 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:33Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:33 crc kubenswrapper[4684]: E1013 13:08:33.576623 4684 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.578770 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.578822 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.578839 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.578863 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.578878 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:33Z","lastTransitionTime":"2025-10-13T13:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.681895 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.681986 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.682000 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.682023 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.682038 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:33Z","lastTransitionTime":"2025-10-13T13:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.784862 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.784998 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.785012 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.785031 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.785045 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:33Z","lastTransitionTime":"2025-10-13T13:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.887690 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.887740 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.887750 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.887768 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.887779 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:33Z","lastTransitionTime":"2025-10-13T13:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.990664 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.990725 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.990741 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.990763 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:33 crc kubenswrapper[4684]: I1013 13:08:33.990784 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:33Z","lastTransitionTime":"2025-10-13T13:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.093521 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.093565 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.093576 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.093594 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.093607 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:34Z","lastTransitionTime":"2025-10-13T13:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.196816 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.196868 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.196880 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.196896 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.196923 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:34Z","lastTransitionTime":"2025-10-13T13:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.299999 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.300066 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.300079 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.300104 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.300120 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:34Z","lastTransitionTime":"2025-10-13T13:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.350103 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:34 crc kubenswrapper[4684]: E1013 13:08:34.350343 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.402742 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.402788 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.402799 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.402820 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.402831 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:34Z","lastTransitionTime":"2025-10-13T13:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.507429 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.507500 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.507512 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.507537 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.507553 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:34Z","lastTransitionTime":"2025-10-13T13:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.610449 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.610503 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.610515 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.610536 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.610553 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:34Z","lastTransitionTime":"2025-10-13T13:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.713180 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.713230 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.713243 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.713265 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.713280 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:34Z","lastTransitionTime":"2025-10-13T13:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.795222 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r7wd2_eb2c3381-fecf-46e7-a034-d3c560dff35e/kube-multus/0.log" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.795286 4684 generic.go:334] "Generic (PLEG): container finished" podID="eb2c3381-fecf-46e7-a034-d3c560dff35e" containerID="41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85" exitCode=1 Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.795333 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-r7wd2" event={"ID":"eb2c3381-fecf-46e7-a034-d3c560dff35e","Type":"ContainerDied","Data":"41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85"} Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.795964 4684 scope.go:117] "RemoveContainer" containerID="41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.817154 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.817559 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.817578 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.817607 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.817627 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:34Z","lastTransitionTime":"2025-10-13T13:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.819377 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:34Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.835772 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:34Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.848430 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16ece342-77d1-4450-ac9a-d94b19143021\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d4b7df401f831751f813a17cc0e75694cf4de341ec37ef97818003ffcf6d598\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f056170937edae1ef2996bc07287e473f8a426f39ab52b9d448d7ef471bce70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665fc86f1a0d4d2c6b1cb02d8c96e6c23b3d77c076af39784b0c4af37355055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:34Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.869260 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"message\\\":\\\"d680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007756c7b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9001,TargetPort:{0 9001 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: machine-config-controller,},ClusterIP:10.217.5.16,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.16],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Conditio\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:08:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:34Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.880865 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:34Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.893717 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:34Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.906428 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:34Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.919455 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"2025-10-13T13:07:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_65747e7a-437f-4e09-a114-f93b4f4df3fb\\\\n2025-10-13T13:07:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_65747e7a-437f-4e09-a114-f93b4f4df3fb to /host/opt/cni/bin/\\\\n2025-10-13T13:07:48Z [verbose] multus-daemon started\\\\n2025-10-13T13:07:48Z [verbose] Readiness Indicator file check\\\\n2025-10-13T13:08:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:34Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.919827 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.919869 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.919881 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.919918 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.919934 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:34Z","lastTransitionTime":"2025-10-13T13:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.930024 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:34Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.951611 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:34Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.968711 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb
461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:34Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.981850 4684 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:34Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:34 crc kubenswrapper[4684]: I1013 13:08:34.996550 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:34Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.009523 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:35Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.022583 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.022614 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.022625 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.022644 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.022658 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:35Z","lastTransitionTime":"2025-10-13T13:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.025937 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:35Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.040684 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:35Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.054465 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:35Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.074352 4684 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:35Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.124947 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.124981 4684 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.124990 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.125005 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.125016 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:35Z","lastTransitionTime":"2025-10-13T13:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.228263 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.228326 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.228341 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.228368 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.228384 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:35Z","lastTransitionTime":"2025-10-13T13:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.331436 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.331577 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.331598 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.331621 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.331637 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:35Z","lastTransitionTime":"2025-10-13T13:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.349888 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:35 crc kubenswrapper[4684]: E1013 13:08:35.350047 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.349891 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.350247 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:35 crc kubenswrapper[4684]: E1013 13:08:35.350479 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:35 crc kubenswrapper[4684]: E1013 13:08:35.350751 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.434876 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.434962 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.434971 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.434992 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.435005 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:35Z","lastTransitionTime":"2025-10-13T13:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.538115 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.538160 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.538168 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.538184 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.538195 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:35Z","lastTransitionTime":"2025-10-13T13:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.641036 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.641100 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.641109 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.641128 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.641140 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:35Z","lastTransitionTime":"2025-10-13T13:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.743696 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.743748 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.743760 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.743777 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.743791 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:35Z","lastTransitionTime":"2025-10-13T13:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.802409 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r7wd2_eb2c3381-fecf-46e7-a034-d3c560dff35e/kube-multus/0.log" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.802491 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-r7wd2" event={"ID":"eb2c3381-fecf-46e7-a034-d3c560dff35e","Type":"ContainerStarted","Data":"d6b6e03a8d9eda8c526bd9b0c6d01d314d3dbaa9f9a9d5238dce1eac3f5c167c"} Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.824671 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:35Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.840742 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:35Z is after 
2025-08-24T17:21:41Z" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.846847 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.846944 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.846956 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.846976 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.846990 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:35Z","lastTransitionTime":"2025-10-13T13:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.860993 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:35Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.880161 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:35Z is after 2025-08-24T17:21:41Z"
Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.916846 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"moun
tPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:35Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.931955 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:35Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.942963 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16ece342-77d1-4450-ac9a-d94b19143021\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d4b7df401f831751f813a17cc0e75694cf4de341ec37ef97818003ffcf6d598\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f056170937edae1ef2996bc07287e473f8a426f39ab52b9d448d7ef471bce70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d8
8c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665fc86f1a0d4d2c6b1cb02d8c96e6c23b3d77c076af39784b0c4af37355055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:35Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.949745 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.949785 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.949798 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.949818 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.949830 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:35Z","lastTransitionTime":"2025-10-13T13:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.960141 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10
-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/
\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"message\\\":\\\"d680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007756c7b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9001,TargetPort:{0 9001 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: 
machine-config-controller,},ClusterIP:10.217.5.16,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.16],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Conditio\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:08:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T1
3:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:35Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.973718 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:35Z is after 2025-08-24T17:21:41Z" Oct 13 
13:08:35 crc kubenswrapper[4684]: I1013 13:08:35.987517 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:35Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.001178 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:35Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.014810 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6b6e03a8d9eda8c526bd9b0c6d01d314d3dbaa9f9a9d5238dce1eac3f5c167c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"2025-10-13T13:07:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_65747e7a-437f-4e09-a114-f93b4f4df3fb\\\\n2025-10-13T13:07:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_65747e7a-437f-4e09-a114-f93b4f4df3fb to /host/opt/cni/bin/\\\\n2025-10-13T13:07:48Z [verbose] multus-daemon started\\\\n2025-10-13T13:07:48Z [verbose] Readiness Indicator file check\\\\n2025-10-13T13:08:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.028736 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.056629 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.056706 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.056722 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.056746 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.056762 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:36Z","lastTransitionTime":"2025-10-13T13:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.065663 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.081285 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.097362 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.112321 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.159749 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.159788 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.159796 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.159810 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.159820 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:36Z","lastTransitionTime":"2025-10-13T13:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.262858 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.262967 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.262986 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.263013 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.263030 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:36Z","lastTransitionTime":"2025-10-13T13:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.350628 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:36 crc kubenswrapper[4684]: E1013 13:08:36.350848 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.365735 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.365772 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.365789 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.365810 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.365827 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:36Z","lastTransitionTime":"2025-10-13T13:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.375558 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.391175 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.409041 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.425417 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.440722 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.466834 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6b6e03a8d9eda8c526bd9b0c6d01d314d3dbaa9f9a9d5238dce1eac3f5c167c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"2025-10-13T13:07:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_65747e7a-437f-4e09-a114-f93b4f4df3fb\\\\n2025-10-13T13:07:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_65747e7a-437f-4e09-a114-f93b4f4df3fb to /host/opt/cni/bin/\\\\n2025-10-13T13:07:48Z [verbose] multus-daemon started\\\\n2025-10-13T13:07:48Z [verbose] Readiness Indicator file check\\\\n2025-10-13T13:08:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.468206 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.468256 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.468268 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.468289 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.468302 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:36Z","lastTransitionTime":"2025-10-13T13:08:36Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.483480 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.529657 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.546642 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.561163 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.570812 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.570852 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.570861 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.570876 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.570887 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:36Z","lastTransitionTime":"2025-10-13T13:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.577072 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.591733 4684 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.611220 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.632153 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.651267 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.673473 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.673523 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.673539 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.673565 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.673581 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:36Z","lastTransitionTime":"2025-10-13T13:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.691718 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"message\\\":\\\"d680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007756c7b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9001,TargetPort:{0 9001 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: machine-config-controller,},ClusterIP:10.217.5.16,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.16],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Conditio\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:08:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.709847 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.723218 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16ece342-77d1-4450-ac9a-d94b19143021\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d4b7df401f831751f813a17cc0e75694cf4de341ec37ef97818003ffcf6d598\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f056170937edae1ef2996bc07287e473f8a426f39ab52b9d448d7ef471bce70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665fc86f1a0d4d2c6b1cb02d8c96e6c23b3d77c076af39784b0c4af37355055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:36Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.776537 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.776777 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.776931 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.777052 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.777159 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:36Z","lastTransitionTime":"2025-10-13T13:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.880263 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.880322 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.880340 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.880365 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.880383 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:36Z","lastTransitionTime":"2025-10-13T13:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.982484 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.982519 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.982531 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.982546 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:36 crc kubenswrapper[4684]: I1013 13:08:36.982580 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:36Z","lastTransitionTime":"2025-10-13T13:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.085263 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.085301 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.085309 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.085324 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.085334 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:37Z","lastTransitionTime":"2025-10-13T13:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.187468 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.187518 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.187532 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.187548 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.187558 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:37Z","lastTransitionTime":"2025-10-13T13:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.290237 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.290613 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.290736 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.290830 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.290946 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:37Z","lastTransitionTime":"2025-10-13T13:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.350059 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.350093 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.350112 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:37 crc kubenswrapper[4684]: E1013 13:08:37.350709 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:37 crc kubenswrapper[4684]: E1013 13:08:37.350771 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:37 crc kubenswrapper[4684]: E1013 13:08:37.350499 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.392873 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.392958 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.392973 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.392999 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.393026 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:37Z","lastTransitionTime":"2025-10-13T13:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.495800 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.496090 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.496257 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.496367 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.496446 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:37Z","lastTransitionTime":"2025-10-13T13:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.598648 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.598941 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.599009 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.599071 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.599144 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:37Z","lastTransitionTime":"2025-10-13T13:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.701234 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.701272 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.701283 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.701300 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.701311 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:37Z","lastTransitionTime":"2025-10-13T13:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.803379 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.803428 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.803441 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.803457 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.803468 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:37Z","lastTransitionTime":"2025-10-13T13:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.905147 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.905183 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.905192 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.905208 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:37 crc kubenswrapper[4684]: I1013 13:08:37.905217 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:37Z","lastTransitionTime":"2025-10-13T13:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.007879 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.007949 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.007962 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.007980 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.007996 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:38Z","lastTransitionTime":"2025-10-13T13:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.110652 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.110967 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.111082 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.111163 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.111250 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:38Z","lastTransitionTime":"2025-10-13T13:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.214146 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.214189 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.214198 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.214212 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.214221 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:38Z","lastTransitionTime":"2025-10-13T13:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.317005 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.317058 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.317067 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.317080 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.317089 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:38Z","lastTransitionTime":"2025-10-13T13:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.350311 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:38 crc kubenswrapper[4684]: E1013 13:08:38.350472 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.420089 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.420189 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.420787 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.420841 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.420860 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:38Z","lastTransitionTime":"2025-10-13T13:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.523408 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.523705 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.523784 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.523881 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.524006 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:38Z","lastTransitionTime":"2025-10-13T13:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.627393 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.627459 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.627474 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.627492 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.627506 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:38Z","lastTransitionTime":"2025-10-13T13:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.730147 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.730182 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.730190 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.730202 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.730211 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:38Z","lastTransitionTime":"2025-10-13T13:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.832510 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.832591 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.832685 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.832733 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.832767 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:38Z","lastTransitionTime":"2025-10-13T13:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.935074 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.935125 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.935148 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.935168 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:38 crc kubenswrapper[4684]: I1013 13:08:38.935183 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:38Z","lastTransitionTime":"2025-10-13T13:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.036825 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.036871 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.036883 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.036916 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.036927 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:39Z","lastTransitionTime":"2025-10-13T13:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.138665 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.138701 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.138708 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.138720 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.138747 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:39Z","lastTransitionTime":"2025-10-13T13:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.241337 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.241633 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.241726 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.241826 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.241927 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:39Z","lastTransitionTime":"2025-10-13T13:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.344789 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.345151 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.345333 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.345491 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.345713 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:39Z","lastTransitionTime":"2025-10-13T13:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.350080 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.350134 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:39 crc kubenswrapper[4684]: E1013 13:08:39.350189 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:39 crc kubenswrapper[4684]: E1013 13:08:39.350347 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.350524 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:39 crc kubenswrapper[4684]: E1013 13:08:39.350795 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.448707 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.448742 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.448751 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.448764 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.448772 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:39Z","lastTransitionTime":"2025-10-13T13:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.551954 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.552007 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.552018 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.552043 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.552057 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:39Z","lastTransitionTime":"2025-10-13T13:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.654874 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.654940 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.654952 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.654971 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.654984 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:39Z","lastTransitionTime":"2025-10-13T13:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.757695 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.758095 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.758195 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.758292 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.758368 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:39Z","lastTransitionTime":"2025-10-13T13:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.860993 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.861274 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.861416 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.861528 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.861611 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:39Z","lastTransitionTime":"2025-10-13T13:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.964672 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.964742 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.964762 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.964792 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:39 crc kubenswrapper[4684]: I1013 13:08:39.964814 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:39Z","lastTransitionTime":"2025-10-13T13:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.067667 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.067712 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.067729 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.067753 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.067768 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:40Z","lastTransitionTime":"2025-10-13T13:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.171274 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.171669 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.171813 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.171967 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.172108 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:40Z","lastTransitionTime":"2025-10-13T13:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.275245 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.275297 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.275311 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.275329 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.275341 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:40Z","lastTransitionTime":"2025-10-13T13:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.349962 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:40 crc kubenswrapper[4684]: E1013 13:08:40.350438 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.350975 4684 scope.go:117] "RemoveContainer" containerID="7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.378622 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.378673 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.378692 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.378714 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.378729 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:40Z","lastTransitionTime":"2025-10-13T13:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.481540 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.481574 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.481584 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.481599 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.481608 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:40Z","lastTransitionTime":"2025-10-13T13:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.583874 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.583950 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.583962 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.583980 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.583992 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:40Z","lastTransitionTime":"2025-10-13T13:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.687063 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.687298 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.687370 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.687433 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.687495 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:40Z","lastTransitionTime":"2025-10-13T13:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.790202 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.790241 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.790251 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.790265 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.790274 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:40Z","lastTransitionTime":"2025-10-13T13:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.819259 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovnkube-controller/2.log" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.821727 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerStarted","Data":"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32"} Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.822187 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.835653 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df
39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:40Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.849090 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:40Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.861714 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16ece342-77d1-4450-ac9a-d94b19143021\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d4b7df401f831751f813a17cc0e75694cf4de341ec37ef97818003ffcf6d598\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f056170937edae1ef2996bc07287e473f8a426f39ab52b9d448d7ef471bce70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d8
8c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665fc86f1a0d4d2c6b1cb02d8c96e6c23b3d77c076af39784b0c4af37355055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:40Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.882075 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"message\\\":\\\"d680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007756c7b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9001,TargetPort:{0 9001 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: 
machine-config-controller,},ClusterIP:10.217.5.16,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.16],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Conditio\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:08:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:08:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\
\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:40Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.893272 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.893323 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.893335 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.893351 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.893361 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:40Z","lastTransitionTime":"2025-10-13T13:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.896205 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:40Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.909705 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:40Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.923217 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:40Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.933352 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:40Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.945062 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6b6e03a8d9eda8c526bd9b0c6d01d314d3dbaa9f9a9d5238dce1eac3f5c167c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"2025-10-13T13:07:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_65747e7a-437f-4e09-a114-f93b4f4df3fb\\\\n2025-10-13T13:07:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_65747e7a-437f-4e09-a114-f93b4f4df3fb to /host/opt/cni/bin/\\\\n2025-10-13T13:07:48Z [verbose] multus-daemon started\\\\n2025-10-13T13:07:48Z [verbose] Readiness Indicator file check\\\\n2025-10-13T13:08:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:40Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.955401 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:40Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.976843 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:40Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.995580 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.995615 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.995627 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.995643 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.995655 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:40Z","lastTransitionTime":"2025-10-13T13:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:40 crc kubenswrapper[4684]: I1013 13:08:40.999516 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:40Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.015463 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:41Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.030692 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:41Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.039819 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:41Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.053078 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:41Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.063640 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:41Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.073026 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:41Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.098096 4684 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.098375 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.098467 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.098583 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.098675 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:41Z","lastTransitionTime":"2025-10-13T13:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.202009 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.202179 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.202200 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.202415 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.202431 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:41Z","lastTransitionTime":"2025-10-13T13:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.306083 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.306127 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.306135 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.306151 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.306161 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:41Z","lastTransitionTime":"2025-10-13T13:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.349695 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.349730 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:41 crc kubenswrapper[4684]: E1013 13:08:41.349863 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:41 crc kubenswrapper[4684]: E1013 13:08:41.349984 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.350318 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:41 crc kubenswrapper[4684]: E1013 13:08:41.350456 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.408532 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.408589 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.408602 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.408625 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.408638 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:41Z","lastTransitionTime":"2025-10-13T13:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.512217 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.512289 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.512308 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.512331 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.512349 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:41Z","lastTransitionTime":"2025-10-13T13:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.615048 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.615099 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.615111 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.615133 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.615146 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:41Z","lastTransitionTime":"2025-10-13T13:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.718020 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.718081 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.718098 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.718123 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.718140 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:41Z","lastTransitionTime":"2025-10-13T13:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.820840 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.820879 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.820892 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.820940 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.820953 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:41Z","lastTransitionTime":"2025-10-13T13:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.833530 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovnkube-controller/3.log" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.834586 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovnkube-controller/2.log" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.838060 4684 generic.go:334] "Generic (PLEG): container finished" podID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerID="906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32" exitCode=1 Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.838119 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerDied","Data":"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32"} Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.838180 4684 scope.go:117] "RemoveContainer" containerID="7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.839857 4684 scope.go:117] "RemoveContainer" containerID="906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32" Oct 13 13:08:41 crc kubenswrapper[4684]: E1013 13:08:41.840528 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.863932 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:41Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.882310 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:41Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.905754 4684 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:41Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.922483 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:41Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.923452 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.923505 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.923516 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.923536 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.923553 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:41Z","lastTransitionTime":"2025-10-13T13:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.938776 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:41Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.954540 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b8279
9488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:41Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:41 crc kubenswrapper[4684]: I1013 13:08:41.972364 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:41Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.003782 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://906974d1f8202ba12d8d0b6c20a598eafcee40fb
e6af75c32a2e519ae1995b32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7bfb6f8dae4bbcf02203493331e30e2a44f4dc2bc66bb8c69e0e9c28c6dd6706\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"message\\\":\\\"d680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007756c7b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9001,TargetPort:{0 9001 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{k8s-app: machine-config-controller,},ClusterIP:10.217.5.16,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.16],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Conditio\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:08:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:41Z\\\",\\\"message\\\":\\\"65] Adding new object: *v1.Pod openshift-image-registry/node-ca-fqh5v\\\\nI1013 13:08:41.127271 6659 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.43:8798: 10.217.4.43:9001:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1013 13:08:41.127294 6659 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-fqh5v in node crc\\\\nI1013 13:08:41.127303 6659 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-fqh5v after 0 failed attempt(s)\\\\nI1013 13:08:41.127296 6659 transact.go:42] 
Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:08:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.019650 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 
13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.035348 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16ece342-77d1-4450-ac9a-d94b19143021\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d4b7df401f831751f813a17cc0e75694cf4de341ec37ef97818003ffcf6d598\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f056170937edae1ef2996bc07287e473f8a426f39ab52b9d448d7ef471bce70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665fc86f1a0d4d2c6b1cb02d8c96e6c23b3d77c076af39784b0c4af37355055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.039686 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.039751 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.039765 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.039784 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.039800 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:42Z","lastTransitionTime":"2025-10-13T13:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.051158 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.064694 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.079520 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.097320 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.112336 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.132844 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6b6e03a8d9eda8c526bd9b0c6d01d314d3dbaa9f9a9d5238dce1eac3f5c167c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"2025-10-13T13:07:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_65747e7a-437f-4e09-a114-f93b4f4df3fb\\\\n2025-10-13T13:07:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_65747e7a-437f-4e09-a114-f93b4f4df3fb to /host/opt/cni/bin/\\\\n2025-10-13T13:07:48Z [verbose] multus-daemon started\\\\n2025-10-13T13:07:48Z [verbose] Readiness Indicator file check\\\\n2025-10-13T13:08:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.142698 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.142745 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.142758 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.142775 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.142786 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:42Z","lastTransitionTime":"2025-10-13T13:08:42Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.146528 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.167995 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.245270 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.245314 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.245327 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.245346 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.245358 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:42Z","lastTransitionTime":"2025-10-13T13:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.348745 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.348799 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.348817 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.348841 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.348858 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:42Z","lastTransitionTime":"2025-10-13T13:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.349777 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:42 crc kubenswrapper[4684]: E1013 13:08:42.349986 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.452695 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.452769 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.452797 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.452830 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.452852 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:42Z","lastTransitionTime":"2025-10-13T13:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.556577 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.556675 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.556771 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.556857 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.556877 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:42Z","lastTransitionTime":"2025-10-13T13:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.660228 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.660520 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.660544 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.660566 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.660581 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:42Z","lastTransitionTime":"2025-10-13T13:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.763426 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.763494 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.763513 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.763541 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.763558 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:42Z","lastTransitionTime":"2025-10-13T13:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.844328 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovnkube-controller/3.log" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.849829 4684 scope.go:117] "RemoveContainer" containerID="906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32" Oct 13 13:08:42 crc kubenswrapper[4684]: E1013 13:08:42.850104 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.866988 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.867049 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.867067 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.867097 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.867119 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:42Z","lastTransitionTime":"2025-10-13T13:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.870559 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.890892 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.907967 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.926703 4684 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.944433 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.959345 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.969517 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.969569 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.969578 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.969590 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.969600 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:42Z","lastTransitionTime":"2025-10-13T13:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.970833 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.980111 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"16ece342-77d1-4450-ac9a-d94b19143021\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d4b7df401f831751f813a17cc0e75694cf4de341ec37ef97818003ffcf6d598\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f056170937edae1ef2996bc07287e473f8a426f39ab52b9d448d7ef471bce70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665fc86f1a0d4d2c6b1cb02d8c96e6c23b3d77c076af39784b0c4af37355055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:42 crc kubenswrapper[4684]: I1013 13:08:42.996205 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://906974d1f8202ba12d8d0b6c20a598eafcee40fb
e6af75c32a2e519ae1995b32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:41Z\\\",\\\"message\\\":\\\"65] Adding new object: *v1.Pod openshift-image-registry/node-ca-fqh5v\\\\nI1013 13:08:41.127271 6659 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.43:8798: 10.217.4.43:9001:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1013 13:08:41.127294 6659 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-fqh5v in node crc\\\\nI1013 13:08:41.127303 6659 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-fqh5v after 0 failed attempt(s)\\\\nI1013 13:08:41.127296 6659 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:08:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:42Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.006449 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:43Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.026658 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:43Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.037088 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb
461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:43Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.049818 4684 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:43Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.060377 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:43Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.069119 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:43Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.071711 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.071740 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.071751 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.071766 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.071776 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:43Z","lastTransitionTime":"2025-10-13T13:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.076491 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.12
6.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:43Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.085648 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6b6e03a8d9eda8c526bd9b0c6d01d314d3dbaa9f9a9d5238dce1eac3f5c167c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"2025-10-13T13:07:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_65747e7a-437f-4e09-a114-f93b4f4df3fb\\\\n2025-10-13T13:07:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_65747e7a-437f-4e09-a114-f93b4f4df3fb to /host/opt/cni/bin/\\\\n2025-10-13T13:07:48Z [verbose] multus-daemon started\\\\n2025-10-13T13:07:48Z [verbose] Readiness Indicator file check\\\\n2025-10-13T13:08:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:43Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.092453 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:43Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.173936 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.174008 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.174030 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.174058 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.174075 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:43Z","lastTransitionTime":"2025-10-13T13:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.278186 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.278639 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.278807 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.278989 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.279138 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:43Z","lastTransitionTime":"2025-10-13T13:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.350172 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.350202 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:43 crc kubenswrapper[4684]: E1013 13:08:43.350384 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.350202 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:43 crc kubenswrapper[4684]: E1013 13:08:43.350532 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:43 crc kubenswrapper[4684]: E1013 13:08:43.350606 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.382587 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.382648 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.382665 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.382689 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.382706 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:43Z","lastTransitionTime":"2025-10-13T13:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.485776 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.485954 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.485979 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.486009 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.486029 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:43Z","lastTransitionTime":"2025-10-13T13:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.589193 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.589258 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.589276 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.589300 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.589317 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:43Z","lastTransitionTime":"2025-10-13T13:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.692408 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.692448 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.692458 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.692477 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.692489 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:43Z","lastTransitionTime":"2025-10-13T13:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.795501 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.795568 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.795583 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.795603 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.795618 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:43Z","lastTransitionTime":"2025-10-13T13:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.899470 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.899535 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.899553 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.899578 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.899597 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:43Z","lastTransitionTime":"2025-10-13T13:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.956352 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.956757 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.956948 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.957111 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.957247 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:43Z","lastTransitionTime":"2025-10-13T13:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:43 crc kubenswrapper[4684]: E1013 13:08:43.976274 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:43Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.981036 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.981087 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.981103 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.981129 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:43 crc kubenswrapper[4684]: I1013 13:08:43.981147 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:43Z","lastTransitionTime":"2025-10-13T13:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:44 crc kubenswrapper[4684]: E1013 13:08:44.002037 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:43Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.007546 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.007681 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
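
The block above is one pass of the kubelet's node-status sync: the same four node events are recorded and the Ready condition stays False because the kubelet finds no CNI configuration file under /etc/kubernetes/cni/net.d/. A minimal Go sketch of the check that message implies, assuming it runs directly on the affected node (illustrative only, not kubelet code):

package main

import (
	"fmt"
	"os"
)

func main() {
	// The kubelet message above points at this directory; an empty (or
	// missing) directory reproduces the NetworkPluginNotReady condition.
	const cniConfDir = "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(cniConfDir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	if len(entries) == 0 {
		fmt.Println("no CNI configuration files: network plugin not ready")
		return
	}
	for _, e := range entries {
		fmt.Println("found CNI config:", e.Name())
	}
}

Until a network plugin writes a config file into that directory, every sync pass logs the same NodeNotReady cycle seen above.
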
event="NodeHasNoDiskPressure" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.007767 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.007846 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.007935 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:44Z","lastTransitionTime":"2025-10-13T13:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:44 crc kubenswrapper[4684]: E1013 13:08:44.022552 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:44Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.026723 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.026832 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
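
The payload in each failed patch is a Kubernetes strategic-merge patch: the $setElementOrder/conditions directive pins the order of the conditions list, whose elements are merged by their type key, and each entry under conditions carries only the fields being updated. A reduced sketch of that shape, trimmed to the Ready condition (an illustration of the format quoted in the error, not the kubelet's own code):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Skeleton of the status patch seen in the log: the directive key lists
	// the merge-key order for "conditions"; each condition entry holds only
	// the fields being set.
	patch := map[string]any{
		"status": map[string]any{
			"$setElementOrder/conditions": []map[string]string{
				{"type": "MemoryPressure"},
				{"type": "DiskPressure"},
				{"type": "PIDPressure"},
				{"type": "Ready"},
			},
			"conditions": []map[string]string{
				{"type": "Ready", "status": "False", "reason": "KubeletNotReady"},
			},
		},
	}
	b, err := json.Marshal(patch)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}

Marshalling the map prints a miniature of the "failed to patch status" body quoted above.
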
event="NodeHasNoDiskPressure" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.026995 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.027083 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.027333 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:44Z","lastTransitionTime":"2025-10-13T13:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:44 crc kubenswrapper[4684]: E1013 13:08:44.043676 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:44Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.048237 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.048282 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
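
Each retry fails for the same root cause reported at the end of the error: the serving certificate of the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, so the API server cannot complete the TLS handshake and rejects the node patch. The run of "will retry" errors between 13:08:43.976274 and 13:08:44.061719 reflects the kubelet's bounded retries within one sync pass; once they are exhausted it waits for the next pass. A hedged diagnostic sketch that reads the certificate actually served on that port, assuming it runs on the node (InsecureSkipVerify only lets the handshake complete so the validity window can be printed):

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Address and port taken from the webhook error above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // inspect the cert even though verification would fail
	})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()
	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject=%v notBefore=%s notAfter=%s expired=%v\n",
		cert.Subject, cert.NotBefore, cert.NotAfter, time.Now().After(cert.NotAfter))
}

Renewing the webhook's serving certificate (or the cluster's rotation machinery doing so) is what clears this class of error; the sketch only confirms the expiry the log already states.
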
event="NodeHasNoDiskPressure" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.048294 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.048313 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.048325 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:44Z","lastTransitionTime":"2025-10-13T13:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:44 crc kubenswrapper[4684]: E1013 13:08:44.061719 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:44Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:44 crc kubenswrapper[4684]: E1013 13:08:44.062016 4684 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.063872 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.063939 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.063962 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.063984 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.064002 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:44Z","lastTransitionTime":"2025-10-13T13:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.169135 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.169199 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.169216 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.169243 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.169260 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:44Z","lastTransitionTime":"2025-10-13T13:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.272122 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.272161 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.272180 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.272193 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.272208 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:44Z","lastTransitionTime":"2025-10-13T13:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.349807 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:44 crc kubenswrapper[4684]: E1013 13:08:44.349999 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.375495 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.375538 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.375554 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.375575 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.375589 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:44Z","lastTransitionTime":"2025-10-13T13:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.480837 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.480932 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.480951 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.480978 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.481003 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:44Z","lastTransitionTime":"2025-10-13T13:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.585048 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.585105 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.585119 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.585140 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.585152 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:44Z","lastTransitionTime":"2025-10-13T13:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.688582 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.688640 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.688653 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.688676 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.688690 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:44Z","lastTransitionTime":"2025-10-13T13:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.791981 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.792050 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.792087 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.792107 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.792120 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:44Z","lastTransitionTime":"2025-10-13T13:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.895351 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.895748 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.895848 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.895953 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.896087 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:44Z","lastTransitionTime":"2025-10-13T13:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.999014 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.999052 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.999063 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.999082 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:44 crc kubenswrapper[4684]: I1013 13:08:44.999128 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:44Z","lastTransitionTime":"2025-10-13T13:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.102975 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.103048 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.103074 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.103106 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.103128 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:45Z","lastTransitionTime":"2025-10-13T13:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.208063 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.208116 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.208133 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.208156 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.208175 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:45Z","lastTransitionTime":"2025-10-13T13:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.311334 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.311460 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.311480 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.311511 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.311532 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:45Z","lastTransitionTime":"2025-10-13T13:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.350506 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.350506 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:45 crc kubenswrapper[4684]: E1013 13:08:45.350752 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.350528 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:45 crc kubenswrapper[4684]: E1013 13:08:45.350886 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:45 crc kubenswrapper[4684]: E1013 13:08:45.351107 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.415083 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.415192 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.415214 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.415248 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.415268 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:45Z","lastTransitionTime":"2025-10-13T13:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.519838 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.520326 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.520489 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.520707 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.520858 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:45Z","lastTransitionTime":"2025-10-13T13:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.624099 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.624328 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.624427 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.624528 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.624683 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:45Z","lastTransitionTime":"2025-10-13T13:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.728296 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.728620 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.728732 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.728848 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.728968 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:45Z","lastTransitionTime":"2025-10-13T13:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.832123 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.832187 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.832202 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.832264 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.832283 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:45Z","lastTransitionTime":"2025-10-13T13:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.936072 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.936130 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.936144 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.936174 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:45 crc kubenswrapper[4684]: I1013 13:08:45.936191 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:45Z","lastTransitionTime":"2025-10-13T13:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.039687 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.040244 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.040260 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.040281 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.040295 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:46Z","lastTransitionTime":"2025-10-13T13:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.143895 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.144265 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.144448 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.144559 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.144655 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:46Z","lastTransitionTime":"2025-10-13T13:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.248485 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.248575 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.248606 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.248665 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.248691 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:46Z","lastTransitionTime":"2025-10-13T13:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.349931 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:46 crc kubenswrapper[4684]: E1013 13:08:46.350662 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.351867 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.351962 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.351980 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.352005 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.352024 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:46Z","lastTransitionTime":"2025-10-13T13:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.369773 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16ece342-77d1-4450-ac9a-d94b19143021\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d4b7df401f831751f813a17cc0e75694cf4de341ec37ef97818003ffcf6d598\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f056170937edae1ef2996bc07287e473f8a426f39ab52b9d448d7ef471bce70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665fc86f1a0d4d2c6b1cb02d8c96e6c23b3d77c076af39784b0c4af37355055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.398336 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://906974d1f8202ba12d8d0b6c20a598eafcee40fb
e6af75c32a2e519ae1995b32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:41Z\\\",\\\"message\\\":\\\"65] Adding new object: *v1.Pod openshift-image-registry/node-ca-fqh5v\\\\nI1013 13:08:41.127271 6659 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.43:8798: 10.217.4.43:9001:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1013 13:08:41.127294 6659 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-fqh5v in node crc\\\\nI1013 13:08:41.127303 6659 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-fqh5v after 0 failed attempt(s)\\\\nI1013 13:08:41.127296 6659 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:08:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.413725 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.439968 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.456248 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.456304 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.456319 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.456343 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.456358 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:46Z","lastTransitionTime":"2025-10-13T13:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.457446 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.478506 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.493880 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.506241 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.520820 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.580998 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.581052 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.581063 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.581085 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.581099 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:46Z","lastTransitionTime":"2025-10-13T13:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.587549 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6b6e03a8d9eda8c526bd9b0c6d01d314d3dbaa9f9a9d5238dce1eac3f5c167c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"2025-10-13T13:07:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_65747e7a-437f-4e09-a114-f93b4f4df3fb\\\\n2025-10-13T13:07:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_65747e7a-437f-4e09-a114-f93b4f4df3fb to /host/opt/cni/bin/\\\\n2025-10-13T13:07:48Z [verbose] multus-daemon started\\\\n2025-10-13T13:07:48Z [verbose] Readiness Indicator file check\\\\n2025-10-13T13:08:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.601420 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.616453 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.630496 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.647261 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.664289 4684 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.679500 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.683667 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.683712 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.683724 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.683747 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.683760 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:46Z","lastTransitionTime":"2025-10-13T13:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.697844 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.713242 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:46Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.787191 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.787248 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.787258 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.787277 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.787288 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:46Z","lastTransitionTime":"2025-10-13T13:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.889694 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.890145 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.890223 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.890308 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.890564 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:46Z","lastTransitionTime":"2025-10-13T13:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.993216 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.993560 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.993626 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.993723 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:46 crc kubenswrapper[4684]: I1013 13:08:46.993803 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:46Z","lastTransitionTime":"2025-10-13T13:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.095916 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.095960 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.095972 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.095991 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.096003 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:47Z","lastTransitionTime":"2025-10-13T13:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.198365 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.198412 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.198425 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.198447 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.198459 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:47Z","lastTransitionTime":"2025-10-13T13:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.300720 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.300808 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.300817 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.300835 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.300847 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:47Z","lastTransitionTime":"2025-10-13T13:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.350350 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.350429 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.350604 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:47 crc kubenswrapper[4684]: E1013 13:08:47.350713 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:47 crc kubenswrapper[4684]: E1013 13:08:47.350887 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:47 crc kubenswrapper[4684]: E1013 13:08:47.351063 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.403848 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.403913 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.403928 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.403949 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.403966 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:47Z","lastTransitionTime":"2025-10-13T13:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.506678 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.506714 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.506722 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.506738 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.506748 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:47Z","lastTransitionTime":"2025-10-13T13:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.616551 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.616987 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.617078 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.617159 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.617221 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:47Z","lastTransitionTime":"2025-10-13T13:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.720923 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.721001 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.721294 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.721323 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.721338 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:47Z","lastTransitionTime":"2025-10-13T13:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.824192 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.824256 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.824272 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.824295 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.824310 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:47Z","lastTransitionTime":"2025-10-13T13:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.927924 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.928450 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.928548 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.928647 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:47 crc kubenswrapper[4684]: I1013 13:08:47.928745 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:47Z","lastTransitionTime":"2025-10-13T13:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.032061 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.032139 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.032163 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.032236 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.032261 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:48Z","lastTransitionTime":"2025-10-13T13:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.135980 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.136019 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.136031 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.136048 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.136058 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:48Z","lastTransitionTime":"2025-10-13T13:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.240274 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.240381 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.240399 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.240422 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.240440 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:48Z","lastTransitionTime":"2025-10-13T13:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.343577 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.344307 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.344405 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.344519 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.344611 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:48Z","lastTransitionTime":"2025-10-13T13:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.350196 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:48 crc kubenswrapper[4684]: E1013 13:08:48.350460 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.447737 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.447793 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.447806 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.447829 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.447841 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:48Z","lastTransitionTime":"2025-10-13T13:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.552567 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.552621 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.552632 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.552653 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.552696 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:48Z","lastTransitionTime":"2025-10-13T13:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.656710 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.656789 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.656805 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.656828 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.656847 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:48Z","lastTransitionTime":"2025-10-13T13:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.760588 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.760688 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.760708 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.760738 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.760757 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:48Z","lastTransitionTime":"2025-10-13T13:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.864471 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.864949 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.865060 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.865171 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.865261 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:48Z","lastTransitionTime":"2025-10-13T13:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.968895 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.968975 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.969014 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.969035 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:48 crc kubenswrapper[4684]: I1013 13:08:48.969050 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:48Z","lastTransitionTime":"2025-10-13T13:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.072255 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.072326 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.072342 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.072365 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.072380 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:49Z","lastTransitionTime":"2025-10-13T13:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.175941 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.176267 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.176360 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.176474 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.176564 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:49Z","lastTransitionTime":"2025-10-13T13:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.280044 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.280120 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.280136 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.280159 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.280173 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:49Z","lastTransitionTime":"2025-10-13T13:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.350412 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.350511 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.350435 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:49 crc kubenswrapper[4684]: E1013 13:08:49.350633 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:49 crc kubenswrapper[4684]: E1013 13:08:49.350850 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:49 crc kubenswrapper[4684]: E1013 13:08:49.351178 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.384247 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.384306 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.384319 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.384343 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.384358 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:49Z","lastTransitionTime":"2025-10-13T13:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.488430 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.488484 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.488498 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.488517 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.488531 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:49Z","lastTransitionTime":"2025-10-13T13:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.591632 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.591684 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.591693 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.591708 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.591717 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:49Z","lastTransitionTime":"2025-10-13T13:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.693477 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.693747 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.693815 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.693916 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.693991 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:49Z","lastTransitionTime":"2025-10-13T13:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.719279 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.719399 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:49 crc kubenswrapper[4684]: E1013 13:08:49.719517 4684 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 13:08:49 crc kubenswrapper[4684]: E1013 13:08:49.719565 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.719551783 +0000 UTC m=+148.286935853 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 13:08:49 crc kubenswrapper[4684]: E1013 13:08:49.719772 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.719747519 +0000 UTC m=+148.287131589 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.797075 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.797485 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.797677 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.797948 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.798152 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:49Z","lastTransitionTime":"2025-10-13T13:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.820064 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:49 crc kubenswrapper[4684]: E1013 13:08:49.820284 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 13:08:49 crc kubenswrapper[4684]: E1013 13:08:49.820347 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 13:08:49 crc kubenswrapper[4684]: E1013 13:08:49.820368 4684 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:08:49 crc kubenswrapper[4684]: E1013 13:08:49.820463 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.820434898 +0000 UTC m=+148.387819008 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.820752 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.821009 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:49 crc kubenswrapper[4684]: E1013 13:08:49.820942 4684 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 13:08:49 crc kubenswrapper[4684]: E1013 13:08:49.821428 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.821400848 +0000 UTC m=+148.388784958 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 13:08:49 crc kubenswrapper[4684]: E1013 13:08:49.821175 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 13:08:49 crc kubenswrapper[4684]: E1013 13:08:49.821810 4684 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 13:08:49 crc kubenswrapper[4684]: E1013 13:08:49.821987 4684 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 13:08:49 crc kubenswrapper[4684]: E1013 13:08:49.822170 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.82215173 +0000 UTC m=+148.389535830 (durationBeforeRetry 1m4s). 
Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.901482 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.901552 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.901569 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.901597 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:49 crc kubenswrapper[4684]: I1013 13:08:49.901630 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:49Z","lastTransitionTime":"2025-10-13T13:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.005207 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.005274 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.005294 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.005319 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.005336 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:50Z","lastTransitionTime":"2025-10-13T13:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.107987 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.108069 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.108113 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.108136 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.108149 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:50Z","lastTransitionTime":"2025-10-13T13:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.212307 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.212462 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.212484 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.212508 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.212526 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:50Z","lastTransitionTime":"2025-10-13T13:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.315986 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.316058 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.316076 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.316102 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.316121 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:50Z","lastTransitionTime":"2025-10-13T13:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.350468 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:08:50 crc kubenswrapper[4684]: E1013 13:08:50.351095 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.368098 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.418484 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.418941 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.418960 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.418983 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.418998 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:50Z","lastTransitionTime":"2025-10-13T13:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.521659 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.521708 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.521723 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.521742 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.521758 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:50Z","lastTransitionTime":"2025-10-13T13:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.624388 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.624459 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.624481 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.624506 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.624523 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:50Z","lastTransitionTime":"2025-10-13T13:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.728038 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.728096 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.728110 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.728132 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.728146 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:50Z","lastTransitionTime":"2025-10-13T13:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.835191 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.835301 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.835329 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.835384 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.835409 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:50Z","lastTransitionTime":"2025-10-13T13:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.938675 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.938753 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.938779 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.938808 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:50 crc kubenswrapper[4684]: I1013 13:08:50.938830 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:50Z","lastTransitionTime":"2025-10-13T13:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.041848 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.042172 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.042285 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.042466 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.042590 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:51Z","lastTransitionTime":"2025-10-13T13:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.145760 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.146214 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.146391 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.146552 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.146765 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:51Z","lastTransitionTime":"2025-10-13T13:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.250364 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.250965 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.251264 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.251460 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.251657 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:51Z","lastTransitionTime":"2025-10-13T13:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.350084 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.350088 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:08:51 crc kubenswrapper[4684]: E1013 13:08:51.350655 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:08:51 crc kubenswrapper[4684]: E1013 13:08:51.350674 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.350135 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:08:51 crc kubenswrapper[4684]: E1013 13:08:51.352085 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.355284 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.355342 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.355365 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.355394 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.355417 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:51Z","lastTransitionTime":"2025-10-13T13:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.458869 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.458961 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.458985 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.459013 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.459030 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:51Z","lastTransitionTime":"2025-10-13T13:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.562876 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.563318 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.563461 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.563606 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.563733 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:51Z","lastTransitionTime":"2025-10-13T13:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.666993 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.667334 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.667463 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.667581 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.667666 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:51Z","lastTransitionTime":"2025-10-13T13:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.770320 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.770706 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.771198 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.771615 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.771945 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:51Z","lastTransitionTime":"2025-10-13T13:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.875765 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.875840 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.875853 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.875875 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.875890 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:51Z","lastTransitionTime":"2025-10-13T13:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.979169 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.979251 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.979274 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.979305 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:51 crc kubenswrapper[4684]: I1013 13:08:51.979326 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:51Z","lastTransitionTime":"2025-10-13T13:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.083423 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.083466 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.083478 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.083496 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.083507 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:52Z","lastTransitionTime":"2025-10-13T13:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.186428 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.186492 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.186514 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.186532 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.186545 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:52Z","lastTransitionTime":"2025-10-13T13:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.288867 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.288977 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.288999 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.289027 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.289050 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:52Z","lastTransitionTime":"2025-10-13T13:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.350387 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:52 crc kubenswrapper[4684]: E1013 13:08:52.350618 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.394281 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.394316 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.394324 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.394339 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.394348 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:52Z","lastTransitionTime":"2025-10-13T13:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.497855 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.497924 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.497933 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.497954 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.497966 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:52Z","lastTransitionTime":"2025-10-13T13:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.601048 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.601104 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.601118 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.601141 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.601157 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:52Z","lastTransitionTime":"2025-10-13T13:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.704069 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.704141 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.704156 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.704178 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.704191 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:52Z","lastTransitionTime":"2025-10-13T13:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.808475 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.808549 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.808567 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.808592 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.808609 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:52Z","lastTransitionTime":"2025-10-13T13:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.912455 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.912541 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.912566 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.912595 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:52 crc kubenswrapper[4684]: I1013 13:08:52.912615 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:52Z","lastTransitionTime":"2025-10-13T13:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.016077 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.016219 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.016296 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.016336 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.016356 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:53Z","lastTransitionTime":"2025-10-13T13:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.119894 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.120002 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.120011 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.120030 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.120041 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:53Z","lastTransitionTime":"2025-10-13T13:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.222553 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.222592 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.222601 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.222615 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.222624 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:53Z","lastTransitionTime":"2025-10-13T13:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.324919 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.324973 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.324987 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.325004 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.325017 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:53Z","lastTransitionTime":"2025-10-13T13:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.349697 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.349747 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.349711 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:53 crc kubenswrapper[4684]: E1013 13:08:53.349840 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:53 crc kubenswrapper[4684]: E1013 13:08:53.349984 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:53 crc kubenswrapper[4684]: E1013 13:08:53.350092 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.429060 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.429125 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.429140 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.429161 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.429175 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:53Z","lastTransitionTime":"2025-10-13T13:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.532288 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.532630 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.532800 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.533077 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.533267 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:53Z","lastTransitionTime":"2025-10-13T13:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.636741 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.637213 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.637452 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.637656 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.637847 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:53Z","lastTransitionTime":"2025-10-13T13:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.741444 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.741994 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.742149 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.742352 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.742579 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:53Z","lastTransitionTime":"2025-10-13T13:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.846336 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.846420 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.846449 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.846491 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.846521 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:53Z","lastTransitionTime":"2025-10-13T13:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.950001 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.950077 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.950099 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.950130 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:53 crc kubenswrapper[4684]: I1013 13:08:53.950152 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:53Z","lastTransitionTime":"2025-10-13T13:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.053650 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.053735 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.053775 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.053795 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.053808 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:54Z","lastTransitionTime":"2025-10-13T13:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.151457 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.151527 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.151546 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.151570 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.151588 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:54Z","lastTransitionTime":"2025-10-13T13:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 13 13:08:54 crc kubenswrapper[4684]: E1013 13:08:54.172186 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.178362 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.178430 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.178445 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.178469 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.178481 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:54Z","lastTransitionTime":"2025-10-13T13:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:54 crc kubenswrapper[4684]: E1013 13:08:54.196805 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.201896 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.201967 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.201980 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.201998 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.202010 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:54Z","lastTransitionTime":"2025-10-13T13:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:54 crc kubenswrapper[4684]: E1013 13:08:54.221436 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.227018 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.227057 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.227087 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.227116 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.227132 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:54Z","lastTransitionTime":"2025-10-13T13:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:54 crc kubenswrapper[4684]: E1013 13:08:54.245704 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.249835 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.249885 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.249922 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.249952 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.249970 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:54Z","lastTransitionTime":"2025-10-13T13:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:54 crc kubenswrapper[4684]: E1013 13:08:54.269308 4684 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7a36ef7b-b5c3-4d0b-98f0-0d4235840e71\\\",\\\"systemUUID\\\":\\\"0f296227-953c-4cb6-a3a1-229df6b9f745\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:54Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:54 crc kubenswrapper[4684]: E1013 13:08:54.269598 4684 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.272213 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.272310 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.272379 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.272445 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.272505 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:54Z","lastTransitionTime":"2025-10-13T13:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.350719 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:54 crc kubenswrapper[4684]: E1013 13:08:54.351023 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.375209 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.375273 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.375295 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.375324 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.375345 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:54Z","lastTransitionTime":"2025-10-13T13:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.478058 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.478132 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.478148 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.478172 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.478189 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:54Z","lastTransitionTime":"2025-10-13T13:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.580978 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.581046 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.581070 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.581101 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.581123 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:54Z","lastTransitionTime":"2025-10-13T13:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.684242 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.684304 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.684328 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.684358 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.684381 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:54Z","lastTransitionTime":"2025-10-13T13:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.787485 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.787544 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.787569 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.787593 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.787612 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:54Z","lastTransitionTime":"2025-10-13T13:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.891537 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.891618 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.891646 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.891680 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.891704 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:54Z","lastTransitionTime":"2025-10-13T13:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.994633 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.994838 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.994863 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.994886 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:54 crc kubenswrapper[4684]: I1013 13:08:54.994934 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:54Z","lastTransitionTime":"2025-10-13T13:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.098078 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.098153 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.098167 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.098187 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.098198 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:55Z","lastTransitionTime":"2025-10-13T13:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.201167 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.201239 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.201262 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.201292 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.201316 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:55Z","lastTransitionTime":"2025-10-13T13:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.304050 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.304132 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.304153 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.304175 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.304193 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:55Z","lastTransitionTime":"2025-10-13T13:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.349796 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.349867 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:55 crc kubenswrapper[4684]: E1013 13:08:55.349973 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.349827 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:55 crc kubenswrapper[4684]: E1013 13:08:55.350112 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:55 crc kubenswrapper[4684]: E1013 13:08:55.350144 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.407112 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.407177 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.407197 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.407223 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.407243 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:55Z","lastTransitionTime":"2025-10-13T13:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.510172 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.510271 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.510310 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.510346 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.510373 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:55Z","lastTransitionTime":"2025-10-13T13:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.613675 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.613742 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.613759 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.613785 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.613803 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:55Z","lastTransitionTime":"2025-10-13T13:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.717292 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.717350 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.717372 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.717400 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.717421 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:55Z","lastTransitionTime":"2025-10-13T13:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.820329 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.820387 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.820404 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.820427 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.820444 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:55Z","lastTransitionTime":"2025-10-13T13:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.923611 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.923662 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.923677 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.923697 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:55 crc kubenswrapper[4684]: I1013 13:08:55.923711 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:55Z","lastTransitionTime":"2025-10-13T13:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.026922 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.026965 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.026976 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.026993 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.027005 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:56Z","lastTransitionTime":"2025-10-13T13:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.130070 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.130118 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.130134 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.130157 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.130173 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:56Z","lastTransitionTime":"2025-10-13T13:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.232647 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.232722 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.232748 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.232771 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.232787 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:56Z","lastTransitionTime":"2025-10-13T13:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.336291 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.336348 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.336366 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.336389 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.336405 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:56Z","lastTransitionTime":"2025-10-13T13:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.350490 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:56 crc kubenswrapper[4684]: E1013 13:08:56.350679 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.367689 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p9ngm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"310416cc-40e9-4131-a1fc-535d397195b7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff323b12155b0188feca3338c6f9dae792620b68fb1721b9ccc7e6a39ab061c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks5cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p9ngm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.391110 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r7wd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb2c3381-fecf-46e7-a034-d3c560dff35e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6b6e03a8d9eda8c526bd9b0c6d01d314d3dbaa9f9a9d5238dce1eac3f5c167c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:33Z\\\",\\\"message\\\":\\\"2025-10-13T13:07:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_65747e7a-437f-4e09-a114-f93b4f4df3fb\\\\n2025-10-13T13:07:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_65747e7a-437f-4e09-a114-f93b4f4df3fb to /host/opt/cni/bin/\\\\n2025-10-13T13:07:48Z [verbose] multus-daemon started\\\\n2025-10-13T13:07:48Z [verbose] Readiness Indicator file check\\\\n2025-10-13T13:08:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:08:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2dpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r7wd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.408567 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fqh5v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee89fde6-f464-42d7-aa10-83a15b2bf981\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbfce9f780eebab96d63c1ad46a8753b8ef750b3933b90802dd1cfb783e195cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7spsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fqh5v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.432308 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8da4405-4d72-4894-acaf-cc0fd1183141\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3949e7bcb944fbb82a0418e2d7ab6f88092e9db47402e66ca22d0b7d65d42e59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcef78146b390275c3e5123f0695f43e139e8fe602c7f4c005e9b60261cb4b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ba4f8b6eba02d16ebc6ba24930fdfb120cbaaf308be015514a112f56501dfd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a5e250ce84efdc3c0690e1589acbe374224a91
d6f94f4849a1ccb381d6758ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c6670280b76ee3e2ca9dfb66550ba314e531a7b0adfd75ea929ae4933d7adb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08c689b1f2c2e01874e8b9a7a430c58a182b06a4a863829d980b379c8a6c7338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://688b8b119ba5438121a1e4b4d6fbfdc1518754c5f64235addf4e69c3f75b5b8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9f279cdc2b0a0a335e5f6caddbe68114222f90a5144e7b476e02fbc5bb857d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.439528 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.439572 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.439585 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.439601 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.439615 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:56Z","lastTransitionTime":"2025-10-13T13:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.452402 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12fc1b78-8823-4fba-a5a3-65e4f91834a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237e260dc149138cdce8fcf0d68f01539f3fa0a19cf35dfde12932c70d16caa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4a433a2870576be6426fcd77d0f92bb461a649a5f5202bf98ebe054194ac401\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c13823153ceb973ecaaca29777ccb2ec9e86e200e129da1e705e217deb389fc7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9698615eebbb531c4e580dc8d5a2890514594ba0f99819019c2e605d1d229038\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d678330c37cf5501e0a0d12b1cb9b4f811979d79c3696fe3e6e12c0f700a2174\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1013 13:07:40.015243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1013 13:07:40.018082 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2997848581/tls.crt::/tmp/serving-cert-2997848581/tls.key\\\\\\\"\\\\nI1013 13:07:45.785340 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1013 13:07:45.788326 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1013 13:07:45.788344 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1013 13:07:45.788370 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1013 13:07:45.788377 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1013 13:07:45.796881 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1013 13:07:45.796994 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797028 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 13:07:45.797055 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 13:07:45.797083 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 13:07:45.797109 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nI1013 13:07:45.797003 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1013 13:07:45.797960 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1013 13:07:45.801624 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://847d04d30a9085dd3f1ba5ede5e8717984da89b05b3b5899ff4dddcbcdbef3b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75630b0dab71f2b17a8663f493fc0bcca2f195f76906ead2b3181f48ec010f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.471613 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0834fa3137307bf6f25e5bac2222f68ea6a49cff425a8f777cdf59829ade1277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.488200 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.500102 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128165bcb51d89162119aa035cdda241d02c88e52681ceb773e5bb0d1bc8a2cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.513556 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f30c0faeaedfddd3b7b11c7018fab5cbeb44e80782d1d7999766a20fd1d9838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5b6af7e2ceab1f3cf49516bbd0152c7031e079294946bdbd413fc193dfe1ef68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.525692 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.537014 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e54ad64a-6df7-4082-afde-d56463121b3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34ffcea4752cdcc693a6145f338323953acff72403a45f1bccb1953fe0d99a49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h9jv8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wns5s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.541607 4684 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.541641 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.541655 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.541674 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.541689 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:56Z","lastTransitionTime":"2025-10-13T13:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.549809 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5w59x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2434fbe2-6014-4914-8ed3-c5d18e053150\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37e1b16ab6d4302d4582315232e06a67e3573206df4bc85adf8385714315d8ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99f7025141e3bc97007bd5ee27a321482d80d6f6bc450d3e0d63791a694aac7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a67459a447a2d8edb7c349e4adf6284e7aff1360325b40e8f33c54d208306908\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c919241997c1ff6a150d493e8c161e46aa80289419542a5f50fd1e92fedfd1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2fbef81fe9bff3e4ae335211ecb1008040355a50aafaec9ac883203bb9c900da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5141866d19dde0b1f25e3fbd01649bd601c97a1b7e748ff66ff1193542a41318\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f602fcb829e9da7102488b07405dbe253afbc4636b82acf235c3cba7f941d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:52Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7r6r4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:46Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5w59x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.562291 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9150445c-49fc-46c8-b101-d672f0485cbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2t2q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-mlkgd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.573969 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a0fc343-445c-408e-b1d3-aaf79ca3695b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://012a38cb4bafb7cf4b300f20b1d51b0af45dbcf83ac534f47fd0be4914cc470a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b2142d8d94e4332daac837f6666cb8d1ebf547154443c6923ed086c9178c21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b2142d8d94e4332daac837f6666cb8d1ebf547154443c6923ed086c9178c21f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.587305 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c9755dd-919e-4e1f-bbac-7fd483b2781b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e163267f5ba275c1c0248cd6675d7c1acc5cc175e152f562cdbd17c9625b49b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f8f501e3799daafb1017a2d963b4166fdf833081a53b570b5220dd5d86dc3bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23cde65ec54ae6a3d5df39b46ab01c6c41d38c9626b25c5cc9a17a3dbfa87a29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e645990118f58d5fcc55952caba9a9e459a6db2c68a5c88ffbb104fb18a0da1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.601981 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.612380 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"16ece342-77d1-4450-ac9a-d94b19143021\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:08:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d4b7df401f831751f813a17cc0e75694cf4de341ec37ef97818003ffcf6d598\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f056170937edae1ef2996bc07287e473f8a426f39ab52b9d448d7ef471bce70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1665fc86f1a0d4d2c6b1cb02d8c96e6c23b3d77c076af39784b0c4af37355055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://854718f798d389a60cee73604074fc1d3c020fa96fee7f8d737f9a7912a90d94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.627589 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T13:08:41Z\\\",\\\"message\\\":\\\"65] Adding new object: *v1.Pod openshift-image-registry/node-ca-fqh5v\\\\nI1013 13:08:41.127271 6659 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.43:8798: 10.217.4.43:9001:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1013 13:08:41.127294 6659 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-fqh5v in node crc\\\\nI1013 13:08:41.127303 6659 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-fqh5v after 0 failed attempt(s)\\\\nI1013 13:08:41.127296 6659 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-daemon]} name:Service_openshift-machine-config-operator/machine-config-daemon_\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T13:08:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T13:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T13:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mnvbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-9sq8c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.639819 4684 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"035a2687-3587-4656-887b-f8de9008fbfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T13:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eaae0eaddae00f06f71c999c3873c4824f1223c1cfc38e15faa77ba234c0cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52acb45825b0c2671ae8440367afd1e550fb33ea3e6cd834774d01f4055e210\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T13:07:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4dfv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T13:07:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fgzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T13:08:56Z is after 2025-08-24T17:21:41Z" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.643569 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.643605 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.643620 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.643638 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.643650 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:56Z","lastTransitionTime":"2025-10-13T13:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.746697 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.746758 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.746776 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.746798 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.746814 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:56Z","lastTransitionTime":"2025-10-13T13:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.850173 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.850449 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.850554 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.850636 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.850709 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:56Z","lastTransitionTime":"2025-10-13T13:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.953652 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.954121 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.954287 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.954432 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:56 crc kubenswrapper[4684]: I1013 13:08:56.954560 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:56Z","lastTransitionTime":"2025-10-13T13:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.057403 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.057859 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.058114 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.058443 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.058749 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:57Z","lastTransitionTime":"2025-10-13T13:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.162536 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.162869 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.163052 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.163193 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.163317 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:57Z","lastTransitionTime":"2025-10-13T13:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.266209 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.266578 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.266972 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.267304 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.267451 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:57Z","lastTransitionTime":"2025-10-13T13:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.349667 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:57 crc kubenswrapper[4684]: E1013 13:08:57.349857 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.349687 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:57 crc kubenswrapper[4684]: E1013 13:08:57.350230 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.351096 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:57 crc kubenswrapper[4684]: E1013 13:08:57.351418 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.351662 4684 scope.go:117] "RemoveContainer" containerID="906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32" Oct 13 13:08:57 crc kubenswrapper[4684]: E1013 13:08:57.352021 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.371516 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.371571 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.371589 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.371612 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.371632 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:57Z","lastTransitionTime":"2025-10-13T13:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.475150 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.475511 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.475610 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.475708 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.475796 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:57Z","lastTransitionTime":"2025-10-13T13:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.577520 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.577555 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.577564 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.577576 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.577586 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:57Z","lastTransitionTime":"2025-10-13T13:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.680015 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.680063 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.680079 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.680104 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.680121 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:57Z","lastTransitionTime":"2025-10-13T13:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.782176 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.782204 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.782213 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.782224 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.782232 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:57Z","lastTransitionTime":"2025-10-13T13:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.885029 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.885078 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.885094 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.885118 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.885137 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:57Z","lastTransitionTime":"2025-10-13T13:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.987747 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.987775 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.987785 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.987798 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:57 crc kubenswrapper[4684]: I1013 13:08:57.987807 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:57Z","lastTransitionTime":"2025-10-13T13:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.090551 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.090641 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.090661 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.090692 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.090711 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:58Z","lastTransitionTime":"2025-10-13T13:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.193657 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.193713 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.193730 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.193753 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.193770 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:58Z","lastTransitionTime":"2025-10-13T13:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.297002 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.297072 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.297090 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.297114 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.297132 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:58Z","lastTransitionTime":"2025-10-13T13:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.350150 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:08:58 crc kubenswrapper[4684]: E1013 13:08:58.350334 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.400330 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.400387 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.400405 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.400427 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.400444 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:58Z","lastTransitionTime":"2025-10-13T13:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.504114 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.504183 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.504201 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.504225 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.504245 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:58Z","lastTransitionTime":"2025-10-13T13:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.607037 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.607103 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.607123 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.607149 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.607168 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:58Z","lastTransitionTime":"2025-10-13T13:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.710700 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.710774 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.710794 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.710822 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.710839 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:58Z","lastTransitionTime":"2025-10-13T13:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.814449 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.814525 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.814599 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.814634 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.814657 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:58Z","lastTransitionTime":"2025-10-13T13:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.917748 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.917812 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.917831 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.917855 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:58 crc kubenswrapper[4684]: I1013 13:08:58.917872 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:58Z","lastTransitionTime":"2025-10-13T13:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.021084 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.021129 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.021144 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.021161 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.021174 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:59Z","lastTransitionTime":"2025-10-13T13:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.124563 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.125324 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.125374 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.125409 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.125427 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:59Z","lastTransitionTime":"2025-10-13T13:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.229535 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.229597 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.229614 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.229637 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.229655 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:59Z","lastTransitionTime":"2025-10-13T13:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.333115 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.333527 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.333684 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.333823 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.333993 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:59Z","lastTransitionTime":"2025-10-13T13:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.350081 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:08:59 crc kubenswrapper[4684]: E1013 13:08:59.350275 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.350110 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.350727 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:08:59 crc kubenswrapper[4684]: E1013 13:08:59.350794 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:08:59 crc kubenswrapper[4684]: E1013 13:08:59.351254 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.437591 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.437717 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.437743 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.437774 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.437799 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:59Z","lastTransitionTime":"2025-10-13T13:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.540671 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.540738 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.540754 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.540778 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.540798 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:59Z","lastTransitionTime":"2025-10-13T13:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.644838 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.644931 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.644944 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.644966 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.644979 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:59Z","lastTransitionTime":"2025-10-13T13:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.747578 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.747645 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.747665 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.747690 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.747708 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:59Z","lastTransitionTime":"2025-10-13T13:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.850729 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.850771 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.850784 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.850802 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.850816 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:59Z","lastTransitionTime":"2025-10-13T13:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.954018 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.954080 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.954097 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.954123 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:08:59 crc kubenswrapper[4684]: I1013 13:08:59.954140 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:08:59Z","lastTransitionTime":"2025-10-13T13:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.056735 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.056788 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.056805 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.056830 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.056850 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:00Z","lastTransitionTime":"2025-10-13T13:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.160015 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.160432 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.160538 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.160631 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.160734 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:00Z","lastTransitionTime":"2025-10-13T13:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.265010 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.265358 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.265595 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.265781 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.265996 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:00Z","lastTransitionTime":"2025-10-13T13:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.350461 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:09:00 crc kubenswrapper[4684]: E1013 13:09:00.350871 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.369392 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.369449 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.369458 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.369478 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.369490 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:00Z","lastTransitionTime":"2025-10-13T13:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.473345 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.473394 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.473406 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.473423 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.473435 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:00Z","lastTransitionTime":"2025-10-13T13:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.576503 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.576624 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.576643 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.576714 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.576736 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:00Z","lastTransitionTime":"2025-10-13T13:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.679676 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.679751 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.679774 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.679803 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.679826 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:00Z","lastTransitionTime":"2025-10-13T13:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.783200 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.783270 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.783289 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.783320 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.783340 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:00Z","lastTransitionTime":"2025-10-13T13:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.886187 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.886235 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.886251 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.886274 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.886294 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:00Z","lastTransitionTime":"2025-10-13T13:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.989114 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.989186 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.989204 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.989238 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:00 crc kubenswrapper[4684]: I1013 13:09:00.989259 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:00Z","lastTransitionTime":"2025-10-13T13:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.092947 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.093009 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.093025 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.093052 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.093074 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:01Z","lastTransitionTime":"2025-10-13T13:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.197726 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.197794 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.197812 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.197840 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.197858 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:01Z","lastTransitionTime":"2025-10-13T13:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.300784 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.300848 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.300859 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.300883 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.300897 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:01Z","lastTransitionTime":"2025-10-13T13:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.349864 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.349970 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.349972 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:09:01 crc kubenswrapper[4684]: E1013 13:09:01.350285 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:09:01 crc kubenswrapper[4684]: E1013 13:09:01.350437 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:09:01 crc kubenswrapper[4684]: E1013 13:09:01.350542 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.403885 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.403991 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.404012 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.404040 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.404060 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:01Z","lastTransitionTime":"2025-10-13T13:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.507302 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.507386 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.507409 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.507445 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.507469 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:01Z","lastTransitionTime":"2025-10-13T13:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.613621 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.613689 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.613702 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.613722 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.613736 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:01Z","lastTransitionTime":"2025-10-13T13:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.716840 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.716892 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.716928 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.716953 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.716969 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:01Z","lastTransitionTime":"2025-10-13T13:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.820412 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.820475 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.820491 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.820513 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.820531 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:01Z","lastTransitionTime":"2025-10-13T13:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.923191 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.923249 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.923261 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.923300 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:01 crc kubenswrapper[4684]: I1013 13:09:01.923312 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:01Z","lastTransitionTime":"2025-10-13T13:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.026569 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.026640 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.026665 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.026694 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.026716 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:02Z","lastTransitionTime":"2025-10-13T13:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.130261 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.130316 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.130335 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.130359 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.130380 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:02Z","lastTransitionTime":"2025-10-13T13:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.233261 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.233332 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.233355 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.233383 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.233404 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:02Z","lastTransitionTime":"2025-10-13T13:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.335290 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.335581 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.335672 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.335756 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.335826 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:02Z","lastTransitionTime":"2025-10-13T13:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.349711 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:09:02 crc kubenswrapper[4684]: E1013 13:09:02.350129 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.438648 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.438704 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.438726 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.438750 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.438764 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:02Z","lastTransitionTime":"2025-10-13T13:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.541624 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.541673 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.541689 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.541709 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.541725 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:02Z","lastTransitionTime":"2025-10-13T13:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.644929 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.644969 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.644980 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.644993 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.645004 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:02Z","lastTransitionTime":"2025-10-13T13:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.747688 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.747734 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.747746 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.747761 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.747776 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:02Z","lastTransitionTime":"2025-10-13T13:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.850396 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.850443 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.850453 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.850469 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.850483 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:02Z","lastTransitionTime":"2025-10-13T13:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.953515 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.953565 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.953586 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.953608 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:02 crc kubenswrapper[4684]: I1013 13:09:02.953620 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:02Z","lastTransitionTime":"2025-10-13T13:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.057150 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.057239 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.057264 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.057290 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.057314 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:03Z","lastTransitionTime":"2025-10-13T13:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.160980 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.161083 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.161106 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.161138 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.161163 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:03Z","lastTransitionTime":"2025-10-13T13:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.263769 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.263813 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.263825 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.263842 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.263855 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:03Z","lastTransitionTime":"2025-10-13T13:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.350027 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.350164 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:09:03 crc kubenswrapper[4684]: E1013 13:09:03.350239 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.350027 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:09:03 crc kubenswrapper[4684]: E1013 13:09:03.350374 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 13:09:03 crc kubenswrapper[4684]: E1013 13:09:03.350504 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.366395 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.366462 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.366486 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.366514 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.366535 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:03Z","lastTransitionTime":"2025-10-13T13:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.469503 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.469568 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.469586 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.469609 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.469625 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:03Z","lastTransitionTime":"2025-10-13T13:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.572878 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.572971 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.572989 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.573010 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.573025 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:03Z","lastTransitionTime":"2025-10-13T13:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.675710 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.675793 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.675819 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.675851 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.675873 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:03Z","lastTransitionTime":"2025-10-13T13:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.779528 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.779609 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.779633 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.779663 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.779684 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:03Z","lastTransitionTime":"2025-10-13T13:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.883276 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.883348 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.883386 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.883421 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.883449 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:03Z","lastTransitionTime":"2025-10-13T13:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.986556 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.986635 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.986699 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.986732 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:03 crc kubenswrapper[4684]: I1013 13:09:03.986752 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:03Z","lastTransitionTime":"2025-10-13T13:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.089417 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.089448 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.089455 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.089468 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.089477 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:04Z","lastTransitionTime":"2025-10-13T13:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.192653 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.192727 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.192744 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.192770 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.192790 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:04Z","lastTransitionTime":"2025-10-13T13:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.296338 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.296403 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.296426 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.296453 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.296471 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:04Z","lastTransitionTime":"2025-10-13T13:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.313297 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs\") pod \"network-metrics-daemon-mlkgd\" (UID: \"9150445c-49fc-46c8-b101-d672f0485cbb\") " pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:04 crc kubenswrapper[4684]: E1013 13:09:04.313494 4684 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 13 13:09:04 crc kubenswrapper[4684]: E1013 13:09:04.313611 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs podName:9150445c-49fc-46c8-b101-d672f0485cbb nodeName:}" failed. No retries permitted until 2025-10-13 13:10:08.313585912 +0000 UTC m=+162.880970022 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs") pod "network-metrics-daemon-mlkgd" (UID: "9150445c-49fc-46c8-b101-d672f0485cbb") : object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.350164 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:04 crc kubenswrapper[4684]: E1013 13:09:04.350413 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.399344 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.399414 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.399434 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.399459 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.399477 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:04Z","lastTransitionTime":"2025-10-13T13:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.502465 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.502551 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.502568 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.502593 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.502614 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:04Z","lastTransitionTime":"2025-10-13T13:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.605643 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.605694 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.605715 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.605737 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.605754 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:04Z","lastTransitionTime":"2025-10-13T13:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.643448 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.643504 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.643520 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.643544 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.643561 4684 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T13:09:04Z","lastTransitionTime":"2025-10-13T13:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.714338 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"]
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.715438 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.722926 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.723321 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.723429 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.723865 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.769651 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=78.769626251 podStartE2EDuration="1m18.769626251s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:04.752573205 +0000 UTC m=+99.319957285" watchObservedRunningTime="2025-10-13 13:09:04.769626251 +0000 UTC m=+99.337010331"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.820495 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c68351d4-280f-4da1-970a-2831cad1eada-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-sk6p2\" (UID: \"c68351d4-280f-4da1-970a-2831cad1eada\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.820563 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/c68351d4-280f-4da1-970a-2831cad1eada-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-sk6p2\" (UID: \"c68351d4-280f-4da1-970a-2831cad1eada\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.820628 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/c68351d4-280f-4da1-970a-2831cad1eada-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-sk6p2\" (UID: \"c68351d4-280f-4da1-970a-2831cad1eada\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.820668 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c68351d4-280f-4da1-970a-2831cad1eada-service-ca\") pod \"cluster-version-operator-5c965bbfc6-sk6p2\" (UID: \"c68351d4-280f-4da1-970a-2831cad1eada\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.820737 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c68351d4-280f-4da1-970a-2831cad1eada-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-sk6p2\" (UID: \"c68351d4-280f-4da1-970a-2831cad1eada\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.834812 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-p9ngm" podStartSLOduration=79.834786613 podStartE2EDuration="1m19.834786613s" podCreationTimestamp="2025-10-13 13:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:04.818020666 +0000 UTC m=+99.385404746" watchObservedRunningTime="2025-10-13 13:09:04.834786613 +0000 UTC m=+99.402170693"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.846550 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-r7wd2" podStartSLOduration=78.846526266 podStartE2EDuration="1m18.846526266s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:04.835769543 +0000 UTC m=+99.403153623" watchObservedRunningTime="2025-10-13 13:09:04.846526266 +0000 UTC m=+99.413910346"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.908203 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=79.908187629 podStartE2EDuration="1m19.908187629s" podCreationTimestamp="2025-10-13 13:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:04.908147938 +0000 UTC m=+99.475532028" watchObservedRunningTime="2025-10-13 13:09:04.908187629 +0000 UTC m=+99.475571699"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.908454 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-fqh5v" podStartSLOduration=79.908450307 podStartE2EDuration="1m19.908450307s" podCreationTimestamp="2025-10-13 13:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:04.847130144 +0000 UTC m=+99.414514244" watchObservedRunningTime="2025-10-13 13:09:04.908450307 +0000 UTC m=+99.475834377"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.921341 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c68351d4-280f-4da1-970a-2831cad1eada-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-sk6p2\" (UID: \"c68351d4-280f-4da1-970a-2831cad1eada\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.921616 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/c68351d4-280f-4da1-970a-2831cad1eada-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-sk6p2\" (UID: \"c68351d4-280f-4da1-970a-2831cad1eada\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.921759 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/c68351d4-280f-4da1-970a-2831cad1eada-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-sk6p2\" (UID: \"c68351d4-280f-4da1-970a-2831cad1eada\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.921876 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c68351d4-280f-4da1-970a-2831cad1eada-service-ca\") pod \"cluster-version-operator-5c965bbfc6-sk6p2\" (UID: \"c68351d4-280f-4da1-970a-2831cad1eada\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.922025 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c68351d4-280f-4da1-970a-2831cad1eada-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-sk6p2\" (UID: \"c68351d4-280f-4da1-970a-2831cad1eada\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.921667 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/c68351d4-280f-4da1-970a-2831cad1eada-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-sk6p2\" (UID: \"c68351d4-280f-4da1-970a-2831cad1eada\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.921827 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/c68351d4-280f-4da1-970a-2831cad1eada-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-sk6p2\" (UID: \"c68351d4-280f-4da1-970a-2831cad1eada\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.922743 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c68351d4-280f-4da1-970a-2831cad1eada-service-ca\") pod \"cluster-version-operator-5c965bbfc6-sk6p2\" (UID: \"c68351d4-280f-4da1-970a-2831cad1eada\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.928788 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c68351d4-280f-4da1-970a-2831cad1eada-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-sk6p2\" (UID: \"c68351d4-280f-4da1-970a-2831cad1eada\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.936552 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podStartSLOduration=79.936538424 podStartE2EDuration="1m19.936538424s" podCreationTimestamp="2025-10-13 13:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:04.936102161 +0000 UTC m=+99.503486241" watchObservedRunningTime="2025-10-13 13:09:04.936538424 +0000 UTC m=+99.503922494"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.941008 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c68351d4-280f-4da1-970a-2831cad1eada-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-sk6p2\" (UID: \"c68351d4-280f-4da1-970a-2831cad1eada\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:04 crc kubenswrapper[4684]: I1013 13:09:04.958604 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-5w59x" podStartSLOduration=78.958583615 podStartE2EDuration="1m18.958583615s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:04.958467442 +0000 UTC m=+99.525851522" watchObservedRunningTime="2025-10-13 13:09:04.958583615 +0000 UTC m=+99.525967685"
Oct 13 13:09:05 crc kubenswrapper[4684]: I1013 13:09:05.010777 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=75.010760086 podStartE2EDuration="1m15.010760086s" podCreationTimestamp="2025-10-13 13:07:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:05.010336952 +0000 UTC m=+99.577721012" watchObservedRunningTime="2025-10-13 13:09:05.010760086 +0000 UTC m=+99.578144156"
Oct 13 13:09:05 crc kubenswrapper[4684]: I1013 13:09:05.034487 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2"
Oct 13 13:09:05 crc kubenswrapper[4684]: I1013 13:09:05.065999 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=15.06597722 podStartE2EDuration="15.06597722s" podCreationTimestamp="2025-10-13 13:08:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:05.034848409 +0000 UTC m=+99.602232519" watchObservedRunningTime="2025-10-13 13:09:05.06597722 +0000 UTC m=+99.633361310"
Oct 13 13:09:05 crc kubenswrapper[4684]: I1013 13:09:05.082757 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fgzqb" podStartSLOduration=79.082739968 podStartE2EDuration="1m19.082739968s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:05.082267983 +0000 UTC m=+99.649652093" watchObservedRunningTime="2025-10-13 13:09:05.082739968 +0000 UTC m=+99.650124048"
Oct 13 13:09:05 crc kubenswrapper[4684]: I1013 13:09:05.097367 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=52.097346019 podStartE2EDuration="52.097346019s" podCreationTimestamp="2025-10-13 13:08:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:05.096354238 +0000 UTC m=+99.663738308" watchObservedRunningTime="2025-10-13 13:09:05.097346019 +0000 UTC m=+99.664730089"
Oct 13 13:09:05 crc kubenswrapper[4684]: I1013 13:09:05.349638 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:09:05 crc kubenswrapper[4684]: I1013 13:09:05.349685 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:09:05 crc kubenswrapper[4684]: E1013 13:09:05.349784 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:09:05 crc kubenswrapper[4684]: E1013 13:09:05.349970 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:09:05 crc kubenswrapper[4684]: I1013 13:09:05.349652 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:09:05 crc kubenswrapper[4684]: E1013 13:09:05.350111 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:09:05 crc kubenswrapper[4684]: I1013 13:09:05.941030 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2" event={"ID":"c68351d4-280f-4da1-970a-2831cad1eada","Type":"ContainerStarted","Data":"fe6e7b82dc2e8f28315c2d3d114dc05075707bda5d9da32e943634d06ebe3b3c"}
Oct 13 13:09:05 crc kubenswrapper[4684]: I1013 13:09:05.941083 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2" event={"ID":"c68351d4-280f-4da1-970a-2831cad1eada","Type":"ContainerStarted","Data":"ba5d06b7a1a4e18d9e5fe8c818268be12295c4f1a433258704fbcc38753b119d"}
Oct 13 13:09:05 crc kubenswrapper[4684]: I1013 13:09:05.955289 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-sk6p2" podStartSLOduration=79.955272805 podStartE2EDuration="1m19.955272805s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:05.953793399 +0000 UTC m=+100.521177499" watchObservedRunningTime="2025-10-13 13:09:05.955272805 +0000 UTC m=+100.522656875"
Oct 13 13:09:06 crc kubenswrapper[4684]: I1013 13:09:06.350261 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:06 crc kubenswrapper[4684]: E1013 13:09:06.352241 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:09:07 crc kubenswrapper[4684]: I1013 13:09:07.350225 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:09:07 crc kubenswrapper[4684]: I1013 13:09:07.350225 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:09:07 crc kubenswrapper[4684]: E1013 13:09:07.350419 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:09:07 crc kubenswrapper[4684]: E1013 13:09:07.350523 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:09:07 crc kubenswrapper[4684]: I1013 13:09:07.350259 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:09:07 crc kubenswrapper[4684]: E1013 13:09:07.350677 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:09:08 crc kubenswrapper[4684]: I1013 13:09:08.350431 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:08 crc kubenswrapper[4684]: E1013 13:09:08.350598 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:09:09 crc kubenswrapper[4684]: I1013 13:09:09.350433 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:09:09 crc kubenswrapper[4684]: I1013 13:09:09.350441 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:09:09 crc kubenswrapper[4684]: I1013 13:09:09.350531 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:09:09 crc kubenswrapper[4684]: E1013 13:09:09.350774 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:09:09 crc kubenswrapper[4684]: E1013 13:09:09.350873 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:09:09 crc kubenswrapper[4684]: E1013 13:09:09.351006 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:09:10 crc kubenswrapper[4684]: I1013 13:09:10.350246 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:10 crc kubenswrapper[4684]: E1013 13:09:10.350466 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:09:11 crc kubenswrapper[4684]: I1013 13:09:11.349821 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:09:11 crc kubenswrapper[4684]: I1013 13:09:11.349823 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:09:11 crc kubenswrapper[4684]: I1013 13:09:11.350093 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:09:11 crc kubenswrapper[4684]: E1013 13:09:11.350306 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:09:11 crc kubenswrapper[4684]: E1013 13:09:11.350445 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:09:11 crc kubenswrapper[4684]: E1013 13:09:11.350663 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:09:12 crc kubenswrapper[4684]: I1013 13:09:12.349843 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:12 crc kubenswrapper[4684]: E1013 13:09:12.350009 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:09:12 crc kubenswrapper[4684]: I1013 13:09:12.351766 4684 scope.go:117] "RemoveContainer" containerID="906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32"
Oct 13 13:09:12 crc kubenswrapper[4684]: E1013 13:09:12.352051 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-9sq8c_openshift-ovn-kubernetes(9b180ad7-c68c-4234-9b7b-aa938e5ad590)\"" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590"
Oct 13 13:09:13 crc kubenswrapper[4684]: I1013 13:09:13.350105 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:09:13 crc kubenswrapper[4684]: I1013 13:09:13.350238 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:09:13 crc kubenswrapper[4684]: I1013 13:09:13.350597 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:09:13 crc kubenswrapper[4684]: E1013 13:09:13.350571 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:09:13 crc kubenswrapper[4684]: E1013 13:09:13.350719 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:09:13 crc kubenswrapper[4684]: E1013 13:09:13.350837 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:09:14 crc kubenswrapper[4684]: I1013 13:09:14.350337 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:14 crc kubenswrapper[4684]: E1013 13:09:14.350591 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:09:15 crc kubenswrapper[4684]: I1013 13:09:15.349756 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:09:15 crc kubenswrapper[4684]: E1013 13:09:15.349970 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:09:15 crc kubenswrapper[4684]: I1013 13:09:15.349785 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:09:15 crc kubenswrapper[4684]: E1013 13:09:15.350574 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:09:15 crc kubenswrapper[4684]: I1013 13:09:15.350402 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:09:15 crc kubenswrapper[4684]: E1013 13:09:15.350879 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:09:16 crc kubenswrapper[4684]: I1013 13:09:16.351575 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:16 crc kubenswrapper[4684]: E1013 13:09:16.351787 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:09:17 crc kubenswrapper[4684]: I1013 13:09:17.349944 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:09:17 crc kubenswrapper[4684]: I1013 13:09:17.350079 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:09:17 crc kubenswrapper[4684]: I1013 13:09:17.350005 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:09:17 crc kubenswrapper[4684]: E1013 13:09:17.350193 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:09:17 crc kubenswrapper[4684]: E1013 13:09:17.350445 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:09:17 crc kubenswrapper[4684]: E1013 13:09:17.350466 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:09:18 crc kubenswrapper[4684]: I1013 13:09:18.350228 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:18 crc kubenswrapper[4684]: E1013 13:09:18.350385 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:09:19 crc kubenswrapper[4684]: I1013 13:09:19.350242 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:09:19 crc kubenswrapper[4684]: I1013 13:09:19.350293 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:09:19 crc kubenswrapper[4684]: I1013 13:09:19.350320 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:09:19 crc kubenswrapper[4684]: E1013 13:09:19.350408 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:09:19 crc kubenswrapper[4684]: E1013 13:09:19.350515 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:09:19 crc kubenswrapper[4684]: E1013 13:09:19.350583 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:09:20 crc kubenswrapper[4684]: I1013 13:09:20.350766 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:20 crc kubenswrapper[4684]: E1013 13:09:20.351078 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:09:21 crc kubenswrapper[4684]: I1013 13:09:21.000064 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r7wd2_eb2c3381-fecf-46e7-a034-d3c560dff35e/kube-multus/1.log"
Oct 13 13:09:21 crc kubenswrapper[4684]: I1013 13:09:21.001270 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r7wd2_eb2c3381-fecf-46e7-a034-d3c560dff35e/kube-multus/0.log"
Oct 13 13:09:21 crc kubenswrapper[4684]: I1013 13:09:21.001347 4684 generic.go:334] "Generic (PLEG): container finished" podID="eb2c3381-fecf-46e7-a034-d3c560dff35e" containerID="d6b6e03a8d9eda8c526bd9b0c6d01d314d3dbaa9f9a9d5238dce1eac3f5c167c" exitCode=1
Oct 13 13:09:21 crc kubenswrapper[4684]: I1013 13:09:21.001400 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-r7wd2" event={"ID":"eb2c3381-fecf-46e7-a034-d3c560dff35e","Type":"ContainerDied","Data":"d6b6e03a8d9eda8c526bd9b0c6d01d314d3dbaa9f9a9d5238dce1eac3f5c167c"}
Oct 13 13:09:21 crc kubenswrapper[4684]: I1013 13:09:21.001468 4684 scope.go:117] "RemoveContainer" containerID="41195da8e4f25c83ba01e382ab43bceb6e3ff830d7e04f8aebd28bef0515ed85"
Oct 13 13:09:21 crc kubenswrapper[4684]: I1013 13:09:21.004208 4684 scope.go:117] "RemoveContainer" containerID="d6b6e03a8d9eda8c526bd9b0c6d01d314d3dbaa9f9a9d5238dce1eac3f5c167c"
Oct 13 13:09:21 crc kubenswrapper[4684]: E1013 13:09:21.004582 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-r7wd2_openshift-multus(eb2c3381-fecf-46e7-a034-d3c560dff35e)\"" pod="openshift-multus/multus-r7wd2" podUID="eb2c3381-fecf-46e7-a034-d3c560dff35e"
Oct 13 13:09:21 crc kubenswrapper[4684]: I1013 13:09:21.350031 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:09:21 crc kubenswrapper[4684]: I1013 13:09:21.350052 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:09:21 crc kubenswrapper[4684]: E1013 13:09:21.350635 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:09:21 crc kubenswrapper[4684]: I1013 13:09:21.350067 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:09:21 crc kubenswrapper[4684]: E1013 13:09:21.351104 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:09:21 crc kubenswrapper[4684]: E1013 13:09:21.351103 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:09:22 crc kubenswrapper[4684]: I1013 13:09:22.007008 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r7wd2_eb2c3381-fecf-46e7-a034-d3c560dff35e/kube-multus/1.log"
Oct 13 13:09:22 crc kubenswrapper[4684]: I1013 13:09:22.350223 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:22 crc kubenswrapper[4684]: E1013 13:09:22.350457 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:09:23 crc kubenswrapper[4684]: I1013 13:09:23.350069 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:09:23 crc kubenswrapper[4684]: I1013 13:09:23.350142 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:09:23 crc kubenswrapper[4684]: E1013 13:09:23.350234 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:09:23 crc kubenswrapper[4684]: I1013 13:09:23.350304 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:09:23 crc kubenswrapper[4684]: E1013 13:09:23.350575 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:09:23 crc kubenswrapper[4684]: E1013 13:09:23.350680 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:09:24 crc kubenswrapper[4684]: I1013 13:09:24.350287 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:24 crc kubenswrapper[4684]: E1013 13:09:24.350414 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:09:25 crc kubenswrapper[4684]: I1013 13:09:25.350464 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:09:25 crc kubenswrapper[4684]: E1013 13:09:25.350705 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:09:25 crc kubenswrapper[4684]: I1013 13:09:25.351261 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:09:25 crc kubenswrapper[4684]: I1013 13:09:25.351324 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:09:25 crc kubenswrapper[4684]: E1013 13:09:25.351430 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:09:25 crc kubenswrapper[4684]: E1013 13:09:25.351551 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:09:26 crc kubenswrapper[4684]: E1013 13:09:26.316597 4684 kubelet_node_status.go:497] "Node not becoming ready in time after startup"
Oct 13 13:09:26 crc kubenswrapper[4684]: I1013 13:09:26.350132 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:26 crc kubenswrapper[4684]: E1013 13:09:26.353752 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:09:26 crc kubenswrapper[4684]: E1013 13:09:26.467644 4684 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Oct 13 13:09:27 crc kubenswrapper[4684]: I1013 13:09:27.350178 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:09:27 crc kubenswrapper[4684]: I1013 13:09:27.350194 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:09:27 crc kubenswrapper[4684]: E1013 13:09:27.350395 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:09:27 crc kubenswrapper[4684]: I1013 13:09:27.350219 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:09:27 crc kubenswrapper[4684]: E1013 13:09:27.350951 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:09:27 crc kubenswrapper[4684]: E1013 13:09:27.351055 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:09:27 crc kubenswrapper[4684]: I1013 13:09:27.351512 4684 scope.go:117] "RemoveContainer" containerID="906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32"
Oct 13 13:09:28 crc kubenswrapper[4684]: I1013 13:09:28.037292 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovnkube-controller/3.log"
Oct 13 13:09:28 crc kubenswrapper[4684]: I1013 13:09:28.040648 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerStarted","Data":"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36"}
Oct 13 13:09:28 crc kubenswrapper[4684]: I1013 13:09:28.041227 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c"
Oct 13 13:09:28 crc kubenswrapper[4684]: I1013 13:09:28.070830 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podStartSLOduration=102.07081129 podStartE2EDuration="1m42.07081129s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:28.070027445 +0000 UTC m=+122.637411535" watchObservedRunningTime="2025-10-13 13:09:28.07081129 +0000 UTC m=+122.638195360"
Oct 13 13:09:28 crc kubenswrapper[4684]: I1013 13:09:28.258421 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-mlkgd"]
Oct 13 13:09:28 crc kubenswrapper[4684]: I1013 13:09:28.258553 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:28 crc kubenswrapper[4684]: E1013 13:09:28.258683 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:09:29 crc kubenswrapper[4684]: I1013 13:09:29.350355 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:09:29 crc kubenswrapper[4684]: I1013 13:09:29.350437 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:09:29 crc kubenswrapper[4684]: E1013 13:09:29.350932 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:09:29 crc kubenswrapper[4684]: I1013 13:09:29.350461 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:09:29 crc kubenswrapper[4684]: E1013 13:09:29.351070 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:09:29 crc kubenswrapper[4684]: E1013 13:09:29.351329 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:09:30 crc kubenswrapper[4684]: I1013 13:09:30.349723 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:30 crc kubenswrapper[4684]: E1013 13:09:30.350450 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:09:31 crc kubenswrapper[4684]: I1013 13:09:31.349855 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:09:31 crc kubenswrapper[4684]: I1013 13:09:31.349855 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:09:31 crc kubenswrapper[4684]: E1013 13:09:31.350049 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:09:31 crc kubenswrapper[4684]: E1013 13:09:31.350143 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:09:31 crc kubenswrapper[4684]: I1013 13:09:31.350725 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:09:31 crc kubenswrapper[4684]: E1013 13:09:31.350981 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:09:31 crc kubenswrapper[4684]: E1013 13:09:31.468890 4684 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Oct 13 13:09:32 crc kubenswrapper[4684]: I1013 13:09:32.350721 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:32 crc kubenswrapper[4684]: E1013 13:09:32.351979 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:09:32 crc kubenswrapper[4684]: I1013 13:09:32.352264 4684 scope.go:117] "RemoveContainer" containerID="d6b6e03a8d9eda8c526bd9b0c6d01d314d3dbaa9f9a9d5238dce1eac3f5c167c"
Oct 13 13:09:33 crc kubenswrapper[4684]: I1013 13:09:33.066506 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r7wd2_eb2c3381-fecf-46e7-a034-d3c560dff35e/kube-multus/1.log"
Oct 13 13:09:33 crc kubenswrapper[4684]: I1013 13:09:33.066857 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-r7wd2" event={"ID":"eb2c3381-fecf-46e7-a034-d3c560dff35e","Type":"ContainerStarted","Data":"623bea441e3569a23040732a3943af02bd10ca1a80181e4ccb673180bd26774c"}
Oct 13 13:09:33 crc kubenswrapper[4684]: I1013 13:09:33.349894 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:09:33 crc kubenswrapper[4684]: I1013 13:09:33.349973 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 13:09:33 crc kubenswrapper[4684]: I1013 13:09:33.349924 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:09:33 crc kubenswrapper[4684]: E1013 13:09:33.350081 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:09:33 crc kubenswrapper[4684]: E1013 13:09:33.350162 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 13:09:33 crc kubenswrapper[4684]: E1013 13:09:33.350301 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:09:34 crc kubenswrapper[4684]: I1013 13:09:34.350385 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd"
Oct 13 13:09:34 crc kubenswrapper[4684]: E1013 13:09:34.350653 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb"
Oct 13 13:09:35 crc kubenswrapper[4684]: I1013 13:09:35.350652 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 13:09:35 crc kubenswrapper[4684]: E1013 13:09:35.350816 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 13:09:35 crc kubenswrapper[4684]: I1013 13:09:35.350885 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 13:09:35 crc kubenswrapper[4684]: E1013 13:09:35.351120 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 13:09:35 crc kubenswrapper[4684]: I1013 13:09:35.351394 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:09:35 crc kubenswrapper[4684]: E1013 13:09:35.351522 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 13:09:36 crc kubenswrapper[4684]: I1013 13:09:36.350015 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:09:36 crc kubenswrapper[4684]: E1013 13:09:36.351022 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mlkgd" podUID="9150445c-49fc-46c8-b101-d672f0485cbb" Oct 13 13:09:37 crc kubenswrapper[4684]: I1013 13:09:37.350454 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:09:37 crc kubenswrapper[4684]: I1013 13:09:37.350554 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:09:37 crc kubenswrapper[4684]: I1013 13:09:37.350868 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:09:37 crc kubenswrapper[4684]: I1013 13:09:37.354439 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 13 13:09:37 crc kubenswrapper[4684]: I1013 13:09:37.354519 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 13 13:09:37 crc kubenswrapper[4684]: I1013 13:09:37.354608 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 13 13:09:37 crc kubenswrapper[4684]: I1013 13:09:37.354805 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 13 13:09:38 crc kubenswrapper[4684]: I1013 13:09:38.349987 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:09:38 crc kubenswrapper[4684]: I1013 13:09:38.353090 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 13 13:09:38 crc kubenswrapper[4684]: I1013 13:09:38.355129 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.887545 4684 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.945868 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2ws7t"] Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.946572 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.949247 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8xvn2"] Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.951088 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.956020 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-87shb"] Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.956791 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs"] Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.957173 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.957586 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.961891 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg"] Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.962568 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.963415 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6"] Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.964279 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.968260 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-l6c9m"] Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.968806 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6"] Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.969686 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.971120 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.971168 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk"] Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.976394 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.978207 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-x95d5"] Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.978753 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79"] Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.979158 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-fg2dj"] Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.979509 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-8vg72"] Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.979984 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.980286 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.980446 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.980779 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-x95d5" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.981129 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.981459 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-fg2dj" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.995457 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.995856 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.996184 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.996339 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.996460 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.996608 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.996814 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.996865 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.996981 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.997052 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.997144 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.997192 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.997332 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.997355 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.997397 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.997487 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.997583 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.997596 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.997703 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.997783 4684 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.999102 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.999347 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-kxhdp"] Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.999500 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.999698 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 13 13:09:45 crc kubenswrapper[4684]: I1013 13:09:45.999955 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.000233 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.000525 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.001032 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.002954 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.003498 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.003682 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.003823 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.004388 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.004496 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.004681 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.004840 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.004964 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.004978 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.005188 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.005216 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.006957 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.007103 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.007199 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.007284 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.007383 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.007502 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.007596 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.007700 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.007823 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.008063 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.008268 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.008985 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.011341 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.011523 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.027093 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.028239 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.029051 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.029404 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.030192 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-rzq52"]
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.030755 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.049630 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.050131 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.050384 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.050841 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.050952 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.051678 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.051771 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.053416 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-rzq52"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.054530 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.054577 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.054773 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.055100 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.055143 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.055281 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.055484 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.055573 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.055680 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.055698 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.055848 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.055864 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-pcpzg"]
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.055982 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.056327 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t"]
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.056594 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.056651 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.056674 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.057441 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.057523 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.057560 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.057637 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.057651 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.057528 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.057809 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.057983 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.063840 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.064212 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.066572 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.068764 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k"]
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.069592 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.074285 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-l52n6"]
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.074824 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.075276 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7"]
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.075844 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qm5mj"]
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.076007 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.076393 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.076659 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.077189 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l52n6"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.077344 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.077380 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.077572 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl"]
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.077946 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl"
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.078121 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47"]
Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.078604 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47"
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.078864 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.098813 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-j679w"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.099134 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.100210 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-j679w" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.101721 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.104981 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fff30902-ec45-47b2-a9ec-e984e1f2b240-auth-proxy-config\") pod \"machine-approver-56656f9798-8g6fg\" (UID: \"fff30902-ec45-47b2-a9ec-e984e1f2b240\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105029 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2ws7t\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105134 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ba1678a8-b5a0-491d-9531-a18c9500d4a3-audit-dir\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105165 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105187 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/c809c1a6-f445-4257-aa0e-64e8e8e9484a-available-featuregates\") pod \"openshift-config-operator-7777fb866f-xqrpk\" (UID: \"c809c1a6-f445-4257-aa0e-64e8e8e9484a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105207 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/9c0bec8b-2056-437f-aafe-dc4194b467df-config\") pod \"console-operator-58897d9998-fg2dj\" (UID: \"9c0bec8b-2056-437f-aafe-dc4194b467df\") " pod="openshift-console-operator/console-operator-58897d9998-fg2dj" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105224 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c0bec8b-2056-437f-aafe-dc4194b467df-serving-cert\") pod \"console-operator-58897d9998-fg2dj\" (UID: \"9c0bec8b-2056-437f-aafe-dc4194b467df\") " pod="openshift-console-operator/console-operator-58897d9998-fg2dj" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105241 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ce48c4a9-ae90-4159-935c-911dea34cac1-etcd-client\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105345 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-trusted-ca-bundle\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105369 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105386 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-service-ca\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105407 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-serving-cert\") pod \"controller-manager-879f6c89f-2ws7t\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105425 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd59bf2a-e617-424e-857c-2f7b94fbb743-config\") pod \"authentication-operator-69f744f599-l6c9m\" (UID: \"dd59bf2a-e617-424e-857c-2f7b94fbb743\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105448 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa90c071-3247-46ed-a635-b234d452ae89-serving-cert\") pod \"route-controller-manager-6576b87f9c-c8ww6\" 
(UID: \"aa90c071-3247-46ed-a635-b234d452ae89\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105546 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7d71377c-822f-4d43-8d79-9d1e6ccdb340-etcd-client\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105566 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/ce48c4a9-ae90-4159-935c-911dea34cac1-audit\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105585 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd59bf2a-e617-424e-857c-2f7b94fbb743-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-l6c9m\" (UID: \"dd59bf2a-e617-424e-857c-2f7b94fbb743\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105613 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dh8dn\" (UniqueName: \"kubernetes.io/projected/ce48c4a9-ae90-4159-935c-911dea34cac1-kube-api-access-dh8dn\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105632 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfnqx\" (UniqueName: \"kubernetes.io/projected/dd59bf2a-e617-424e-857c-2f7b94fbb743-kube-api-access-nfnqx\") pod \"authentication-operator-69f744f599-l6c9m\" (UID: \"dd59bf2a-e617-424e-857c-2f7b94fbb743\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105652 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4lct\" (UniqueName: \"kubernetes.io/projected/aa90c071-3247-46ed-a635-b234d452ae89-kube-api-access-b4lct\") pod \"route-controller-manager-6576b87f9c-c8ww6\" (UID: \"aa90c071-3247-46ed-a635-b234d452ae89\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105671 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf558883-5672-46d6-9d8a-a08070751a86-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-87shb\" (UID: \"bf558883-5672-46d6-9d8a-a08070751a86\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105689 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7d71377c-822f-4d43-8d79-9d1e6ccdb340-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: 
\"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105711 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88qxv\" (UniqueName: \"kubernetes.io/projected/ccfbd45c-9af2-4a8d-904e-da0f0816bc86-kube-api-access-88qxv\") pod \"cluster-samples-operator-665b6dd947-x95d5\" (UID: \"ccfbd45c-9af2-4a8d-904e-da0f0816bc86\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-x95d5" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105730 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80e4e01f-7138-4a37-adab-30201bc0289d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-fzt79\" (UID: \"80e4e01f-7138-4a37-adab-30201bc0289d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105755 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwpv7\" (UniqueName: \"kubernetes.io/projected/9c0bec8b-2056-437f-aafe-dc4194b467df-kube-api-access-mwpv7\") pod \"console-operator-58897d9998-fg2dj\" (UID: \"9c0bec8b-2056-437f-aafe-dc4194b467df\") " pod="openshift-console-operator/console-operator-58897d9998-fg2dj" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105774 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-oauth-serving-cert\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105796 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd59bf2a-e617-424e-857c-2f7b94fbb743-service-ca-bundle\") pod \"authentication-operator-69f744f599-l6c9m\" (UID: \"dd59bf2a-e617-424e-857c-2f7b94fbb743\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.105823 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.106321 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd59bf2a-e617-424e-857c-2f7b94fbb743-serving-cert\") pod \"authentication-operator-69f744f599-l6c9m\" (UID: \"dd59bf2a-e617-424e-857c-2f7b94fbb743\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.111638 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.111666 4684 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.112923 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.112962 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.113013 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.113035 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80e4e01f-7138-4a37-adab-30201bc0289d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-fzt79\" (UID: \"80e4e01f-7138-4a37-adab-30201bc0289d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.113056 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbs4n\" (UniqueName: \"kubernetes.io/projected/7d71377c-822f-4d43-8d79-9d1e6ccdb340-kube-api-access-rbs4n\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.113074 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c809c1a6-f445-4257-aa0e-64e8e8e9484a-serving-cert\") pod \"openshift-config-operator-7777fb866f-xqrpk\" (UID: \"c809c1a6-f445-4257-aa0e-64e8e8e9484a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.113496 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.113515 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.113528 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.113669 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7d71377c-822f-4d43-8d79-9d1e6ccdb340-audit-policies\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.113788 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ce48c4a9-ae90-4159-935c-911dea34cac1-audit-dir\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.113888 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-audit-policies\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.113982 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ce48c4a9-ae90-4159-935c-911dea34cac1-encryption-config\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.114090 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce48c4a9-ae90-4159-935c-911dea34cac1-config\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.114144 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.114429 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlsc2\" (UniqueName: \"kubernetes.io/projected/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-kube-api-access-rlsc2\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.114586 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.114667 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.114746 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-serving-cert\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.114790 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.114827 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.114854 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbkfj\" (UniqueName: \"kubernetes.io/projected/d7687923-06cc-47ec-98c9-5c7c9862d6a2-kube-api-access-wbkfj\") pod \"cluster-image-registry-operator-dc59b4c8b-rdpl6\" (UID: \"d7687923-06cc-47ec-98c9-5c7c9862d6a2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.114879 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c0bec8b-2056-437f-aafe-dc4194b467df-trusted-ca\") pod 
\"console-operator-58897d9998-fg2dj\" (UID: \"9c0bec8b-2056-437f-aafe-dc4194b467df\") " pod="openshift-console-operator/console-operator-58897d9998-fg2dj" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.114902 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ce48c4a9-ae90-4159-935c-911dea34cac1-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.114935 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-client-ca\") pod \"controller-manager-879f6c89f-2ws7t\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.114955 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7d71377c-822f-4d43-8d79-9d1e6ccdb340-audit-dir\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.114971 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-oauth-config\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115035 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/ce48c4a9-ae90-4159-935c-911dea34cac1-node-pullsecrets\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115062 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/ce48c4a9-ae90-4159-935c-911dea34cac1-image-import-ca\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115082 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-config\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115120 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fn6wp\" (UniqueName: \"kubernetes.io/projected/80e4e01f-7138-4a37-adab-30201bc0289d-kube-api-access-fn6wp\") pod \"openshift-apiserver-operator-796bbdcf4f-fzt79\" (UID: \"80e4e01f-7138-4a37-adab-30201bc0289d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79" Oct 13 
13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115146 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bf558883-5672-46d6-9d8a-a08070751a86-images\") pod \"machine-api-operator-5694c8668f-87shb\" (UID: \"bf558883-5672-46d6-9d8a-a08070751a86\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115162 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d71377c-822f-4d43-8d79-9d1e6ccdb340-serving-cert\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115181 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7d71377c-822f-4d43-8d79-9d1e6ccdb340-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115217 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d7687923-06cc-47ec-98c9-5c7c9862d6a2-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rdpl6\" (UID: \"d7687923-06cc-47ec-98c9-5c7c9862d6a2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115238 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115293 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fff30902-ec45-47b2-a9ec-e984e1f2b240-config\") pod \"machine-approver-56656f9798-8g6fg\" (UID: \"fff30902-ec45-47b2-a9ec-e984e1f2b240\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115336 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwzg4\" (UniqueName: \"kubernetes.io/projected/fff30902-ec45-47b2-a9ec-e984e1f2b240-kube-api-access-zwzg4\") pod \"machine-approver-56656f9798-8g6fg\" (UID: \"fff30902-ec45-47b2-a9ec-e984e1f2b240\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115373 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-config\") pod \"controller-manager-879f6c89f-2ws7t\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115389 4684 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7tjp\" (UniqueName: \"kubernetes.io/projected/bf558883-5672-46d6-9d8a-a08070751a86-kube-api-access-s7tjp\") pod \"machine-api-operator-5694c8668f-87shb\" (UID: \"bf558883-5672-46d6-9d8a-a08070751a86\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115406 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7d71377c-822f-4d43-8d79-9d1e6ccdb340-encryption-config\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115427 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d7687923-06cc-47ec-98c9-5c7c9862d6a2-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rdpl6\" (UID: \"d7687923-06cc-47ec-98c9-5c7c9862d6a2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115443 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ce48c4a9-ae90-4159-935c-911dea34cac1-etcd-serving-ca\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115467 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa90c071-3247-46ed-a635-b234d452ae89-config\") pod \"route-controller-manager-6576b87f9c-c8ww6\" (UID: \"aa90c071-3247-46ed-a635-b234d452ae89\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115487 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aa90c071-3247-46ed-a635-b234d452ae89-client-ca\") pod \"route-controller-manager-6576b87f9c-c8ww6\" (UID: \"aa90c071-3247-46ed-a635-b234d452ae89\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115504 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d7687923-06cc-47ec-98c9-5c7c9862d6a2-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rdpl6\" (UID: \"d7687923-06cc-47ec-98c9-5c7c9862d6a2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115523 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-927dd\" (UniqueName: \"kubernetes.io/projected/c809c1a6-f445-4257-aa0e-64e8e8e9484a-kube-api-access-927dd\") pod \"openshift-config-operator-7777fb866f-xqrpk\" (UID: \"c809c1a6-f445-4257-aa0e-64e8e8e9484a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk" Oct 13 13:09:46 crc 
kubenswrapper[4684]: I1013 13:09:46.115539 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ccfbd45c-9af2-4a8d-904e-da0f0816bc86-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-x95d5\" (UID: \"ccfbd45c-9af2-4a8d-904e-da0f0816bc86\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-x95d5" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115554 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/fff30902-ec45-47b2-a9ec-e984e1f2b240-machine-approver-tls\") pod \"machine-approver-56656f9798-8g6fg\" (UID: \"fff30902-ec45-47b2-a9ec-e984e1f2b240\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115571 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg2kw\" (UniqueName: \"kubernetes.io/projected/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-kube-api-access-pg2kw\") pod \"controller-manager-879f6c89f-2ws7t\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115587 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce48c4a9-ae90-4159-935c-911dea34cac1-serving-cert\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115605 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115638 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wm5b\" (UniqueName: \"kubernetes.io/projected/ba1678a8-b5a0-491d-9531-a18c9500d4a3-kube-api-access-4wm5b\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.115654 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf558883-5672-46d6-9d8a-a08070751a86-config\") pod \"machine-api-operator-5694c8668f-87shb\" (UID: \"bf558883-5672-46d6-9d8a-a08070751a86\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.116830 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.117144 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.117319 4684 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.117712 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.117884 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.120207 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.120796 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.121240 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-bp24t"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.121829 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-bp24t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.123019 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-mzb8w"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.123673 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.124057 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.124535 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.125595 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.126180 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.126536 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.127103 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.127867 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.128926 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tg4h7"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.129297 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.130354 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.131198 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.132002 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.132615 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.133240 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.133391 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2ws7t"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.134685 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-fg2dj"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.135943 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-87shb"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.137176 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-rzq52"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.139003 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.140242 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-s86bq"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.140664 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-s86bq" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.141565 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8xvn2"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.143261 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-l6c9m"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.149252 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.150137 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.151724 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-pcpzg"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.154518 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ptz8b"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.157532 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-qlmbp"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.157759 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ptz8b" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.158664 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-qlmbp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.159947 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.163247 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-x95d5"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.165082 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.167859 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-l52n6"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.169665 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.173345 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-lk6sg"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.174533 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-lk6sg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.183454 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.184608 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-8vg72"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.185692 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.187619 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.188110 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.189305 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.189468 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.190646 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.191683 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-j679w"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.192962 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-bp24t"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.194251 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-5qbxw"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.195141 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-5qbxw" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.196821 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.198927 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.200220 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.201847 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-kc5f6"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.202771 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.203949 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qm5mj"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.205055 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.206226 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tg4h7"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.207374 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.208601 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.209020 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.211612 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-kc5f6"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.211726 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.212732 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-kxhdp"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.214010 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.214822 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-lk6sg"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.215850 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-s86bq"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.216390 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5c139ab7-d464-4e3c-877e-5f41ae042c0e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8fv47\" (UID: \"5c139ab7-d464-4e3c-877e-5f41ae042c0e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.216564 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2ws7t\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.216681 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ba1678a8-b5a0-491d-9531-a18c9500d4a3-audit-dir\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.216777 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fa43346f-4d23-4134-b1cb-a69926b2cc12-proxy-tls\") pod \"machine-config-controller-84d6567774-9m7v7\" (UID: \"fa43346f-4d23-4134-b1cb-a69926b2cc12\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.216889 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fff30902-ec45-47b2-a9ec-e984e1f2b240-auth-proxy-config\") pod \"machine-approver-56656f9798-8g6fg\" (UID: \"fff30902-ec45-47b2-a9ec-e984e1f2b240\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.217008 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/c809c1a6-f445-4257-aa0e-64e8e8e9484a-available-featuregates\") pod \"openshift-config-operator-7777fb866f-xqrpk\" (UID: \"c809c1a6-f445-4257-aa0e-64e8e8e9484a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.217092 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b722e16a-9b60-4d7e-84ec-17f4e70cc3bb-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4lk4t\" (UID: \"b722e16a-9b60-4d7e-84ec-17f4e70cc3bb\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.217190 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c0bec8b-2056-437f-aafe-dc4194b467df-config\") pod \"console-operator-58897d9998-fg2dj\" (UID: \"9c0bec8b-2056-437f-aafe-dc4194b467df\") " pod="openshift-console-operator/console-operator-58897d9998-fg2dj" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.217979 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218012 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c0bec8b-2056-437f-aafe-dc4194b467df-serving-cert\") pod \"console-operator-58897d9998-fg2dj\" (UID: \"9c0bec8b-2056-437f-aafe-dc4194b467df\") " pod="openshift-console-operator/console-operator-58897d9998-fg2dj" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218030 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ce48c4a9-ae90-4159-935c-911dea34cac1-etcd-client\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 
13:09:46.218047 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-trusted-ca-bundle\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.217519 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/c809c1a6-f445-4257-aa0e-64e8e8e9484a-available-featuregates\") pod \"openshift-config-operator-7777fb866f-xqrpk\" (UID: \"c809c1a6-f445-4257-aa0e-64e8e8e9484a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218067 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5c139ab7-d464-4e3c-877e-5f41ae042c0e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8fv47\" (UID: \"5c139ab7-d464-4e3c-877e-5f41ae042c0e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218093 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.217050 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ptz8b"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218137 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-service-ca\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218225 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-serving-cert\") pod \"controller-manager-879f6c89f-2ws7t\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218252 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd59bf2a-e617-424e-857c-2f7b94fbb743-config\") pod \"authentication-operator-69f744f599-l6c9m\" (UID: \"dd59bf2a-e617-424e-857c-2f7b94fbb743\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218281 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7042677-5180-4f8b-9bde-40d53c672f5a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-4bdxl\" (UID: \"a7042677-5180-4f8b-9bde-40d53c672f5a\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218311 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa90c071-3247-46ed-a635-b234d452ae89-serving-cert\") pod \"route-controller-manager-6576b87f9c-c8ww6\" (UID: \"aa90c071-3247-46ed-a635-b234d452ae89\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218334 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7d71377c-822f-4d43-8d79-9d1e6ccdb340-etcd-client\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218357 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/ce48c4a9-ae90-4159-935c-911dea34cac1-audit\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218381 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd59bf2a-e617-424e-857c-2f7b94fbb743-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-l6c9m\" (UID: \"dd59bf2a-e617-424e-857c-2f7b94fbb743\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218401 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dh8dn\" (UniqueName: \"kubernetes.io/projected/ce48c4a9-ae90-4159-935c-911dea34cac1-kube-api-access-dh8dn\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218419 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfnqx\" (UniqueName: \"kubernetes.io/projected/dd59bf2a-e617-424e-857c-2f7b94fbb743-kube-api-access-nfnqx\") pod \"authentication-operator-69f744f599-l6c9m\" (UID: \"dd59bf2a-e617-424e-857c-2f7b94fbb743\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218437 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0f479582-d10a-42c3-b8cc-46740db85fd1-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xk62k\" (UID: \"0f479582-d10a-42c3-b8cc-46740db85fd1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218458 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fa43346f-4d23-4134-b1cb-a69926b2cc12-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9m7v7\" (UID: \"fa43346f-4d23-4134-b1cb-a69926b2cc12\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7" Oct 13 
13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218479 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4lct\" (UniqueName: \"kubernetes.io/projected/aa90c071-3247-46ed-a635-b234d452ae89-kube-api-access-b4lct\") pod \"route-controller-manager-6576b87f9c-c8ww6\" (UID: \"aa90c071-3247-46ed-a635-b234d452ae89\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218500 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf558883-5672-46d6-9d8a-a08070751a86-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-87shb\" (UID: \"bf558883-5672-46d6-9d8a-a08070751a86\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218519 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7d71377c-822f-4d43-8d79-9d1e6ccdb340-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218536 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88qxv\" (UniqueName: \"kubernetes.io/projected/ccfbd45c-9af2-4a8d-904e-da0f0816bc86-kube-api-access-88qxv\") pod \"cluster-samples-operator-665b6dd947-x95d5\" (UID: \"ccfbd45c-9af2-4a8d-904e-da0f0816bc86\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-x95d5" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218553 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwpv7\" (UniqueName: \"kubernetes.io/projected/9c0bec8b-2056-437f-aafe-dc4194b467df-kube-api-access-mwpv7\") pod \"console-operator-58897d9998-fg2dj\" (UID: \"9c0bec8b-2056-437f-aafe-dc4194b467df\") " pod="openshift-console-operator/console-operator-58897d9998-fg2dj" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218573 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-oauth-serving-cert\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218590 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7042677-5180-4f8b-9bde-40d53c672f5a-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-4bdxl\" (UID: \"a7042677-5180-4f8b-9bde-40d53c672f5a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218607 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80e4e01f-7138-4a37-adab-30201bc0289d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-fzt79\" (UID: \"80e4e01f-7138-4a37-adab-30201bc0289d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218627 4684 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd59bf2a-e617-424e-857c-2f7b94fbb743-service-ca-bundle\") pod \"authentication-operator-69f744f599-l6c9m\" (UID: \"dd59bf2a-e617-424e-857c-2f7b94fbb743\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218643 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218673 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd59bf2a-e617-424e-857c-2f7b94fbb743-serving-cert\") pod \"authentication-operator-69f744f599-l6c9m\" (UID: \"dd59bf2a-e617-424e-857c-2f7b94fbb743\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218690 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218723 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218734 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218741 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80e4e01f-7138-4a37-adab-30201bc0289d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-fzt79\" (UID: \"80e4e01f-7138-4a37-adab-30201bc0289d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218782 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbs4n\" (UniqueName: \"kubernetes.io/projected/7d71377c-822f-4d43-8d79-9d1e6ccdb340-kube-api-access-rbs4n\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218801 4684 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c809c1a6-f445-4257-aa0e-64e8e8e9484a-serving-cert\") pod \"openshift-config-operator-7777fb866f-xqrpk\" (UID: \"c809c1a6-f445-4257-aa0e-64e8e8e9484a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218822 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7d71377c-822f-4d43-8d79-9d1e6ccdb340-audit-policies\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218847 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-audit-policies\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218863 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ce48c4a9-ae90-4159-935c-911dea34cac1-encryption-config\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218877 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ce48c4a9-ae90-4159-935c-911dea34cac1-audit-dir\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218879 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-service-ca\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218916 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce48c4a9-ae90-4159-935c-911dea34cac1-config\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218933 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlsc2\" (UniqueName: \"kubernetes.io/projected/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-kube-api-access-rlsc2\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218953 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd8628a7-0787-48d4-a4fe-43323245b766-config\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.218981 4684 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/cd8628a7-0787-48d4-a4fe-43323245b766-etcd-ca\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219015 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2fjl\" (UniqueName: \"kubernetes.io/projected/ba0003c9-5951-4b74-b146-38011315db63-kube-api-access-h2fjl\") pod \"downloads-7954f5f757-rzq52\" (UID: \"ba0003c9-5951-4b74-b146-38011315db63\") " pod="openshift-console/downloads-7954f5f757-rzq52" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219035 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f479582-d10a-42c3-b8cc-46740db85fd1-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xk62k\" (UID: \"0f479582-d10a-42c3-b8cc-46740db85fd1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219051 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f479582-d10a-42c3-b8cc-46740db85fd1-config\") pod \"kube-apiserver-operator-766d6c64bb-xk62k\" (UID: \"0f479582-d10a-42c3-b8cc-46740db85fd1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219071 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219089 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-serving-cert\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219105 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rn924\" (UniqueName: \"kubernetes.io/projected/fa43346f-4d23-4134-b1cb-a69926b2cc12-kube-api-access-rn924\") pod \"machine-config-controller-84d6567774-9m7v7\" (UID: \"fa43346f-4d23-4134-b1cb-a69926b2cc12\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219123 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219140 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219158 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.217579 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2ws7t\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.217646 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fff30902-ec45-47b2-a9ec-e984e1f2b240-auth-proxy-config\") pod \"machine-approver-56656f9798-8g6fg\" (UID: \"fff30902-ec45-47b2-a9ec-e984e1f2b240\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219217 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c0bec8b-2056-437f-aafe-dc4194b467df-trusted-ca\") pod \"console-operator-58897d9998-fg2dj\" (UID: \"9c0bec8b-2056-437f-aafe-dc4194b467df\") " pod="openshift-console-operator/console-operator-58897d9998-fg2dj" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219242 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219259 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ce48c4a9-ae90-4159-935c-911dea34cac1-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219291 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxq6v\" (UniqueName: \"kubernetes.io/projected/a7042677-5180-4f8b-9bde-40d53c672f5a-kube-api-access-sxq6v\") pod \"kube-storage-version-migrator-operator-b67b599dd-4bdxl\" (UID: \"a7042677-5180-4f8b-9bde-40d53c672f5a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219321 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-client-ca\") pod \"controller-manager-879f6c89f-2ws7t\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:46 crc 
kubenswrapper[4684]: I1013 13:09:46.219359 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbkfj\" (UniqueName: \"kubernetes.io/projected/d7687923-06cc-47ec-98c9-5c7c9862d6a2-kube-api-access-wbkfj\") pod \"cluster-image-registry-operator-dc59b4c8b-rdpl6\" (UID: \"d7687923-06cc-47ec-98c9-5c7c9862d6a2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219387 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7d71377c-822f-4d43-8d79-9d1e6ccdb340-audit-dir\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219411 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-oauth-config\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219433 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd8628a7-0787-48d4-a4fe-43323245b766-serving-cert\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219465 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/ce48c4a9-ae90-4159-935c-911dea34cac1-node-pullsecrets\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219490 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/ce48c4a9-ae90-4159-935c-911dea34cac1-image-import-ca\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219515 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-config\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219540 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fn6wp\" (UniqueName: \"kubernetes.io/projected/80e4e01f-7138-4a37-adab-30201bc0289d-kube-api-access-fn6wp\") pod \"openshift-apiserver-operator-796bbdcf4f-fzt79\" (UID: \"80e4e01f-7138-4a37-adab-30201bc0289d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219565 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bf558883-5672-46d6-9d8a-a08070751a86-images\") pod 
\"machine-api-operator-5694c8668f-87shb\" (UID: \"bf558883-5672-46d6-9d8a-a08070751a86\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219589 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d71377c-822f-4d43-8d79-9d1e6ccdb340-serving-cert\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219612 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7d71377c-822f-4d43-8d79-9d1e6ccdb340-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219635 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d7687923-06cc-47ec-98c9-5c7c9862d6a2-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rdpl6\" (UID: \"d7687923-06cc-47ec-98c9-5c7c9862d6a2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219661 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219700 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fff30902-ec45-47b2-a9ec-e984e1f2b240-config\") pod \"machine-approver-56656f9798-8g6fg\" (UID: \"fff30902-ec45-47b2-a9ec-e984e1f2b240\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219725 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c139ab7-d464-4e3c-877e-5f41ae042c0e-config\") pod \"kube-controller-manager-operator-78b949d7b-8fv47\" (UID: \"5c139ab7-d464-4e3c-877e-5f41ae042c0e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219749 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cd8628a7-0787-48d4-a4fe-43323245b766-etcd-client\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219784 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwzg4\" (UniqueName: \"kubernetes.io/projected/fff30902-ec45-47b2-a9ec-e984e1f2b240-kube-api-access-zwzg4\") pod \"machine-approver-56656f9798-8g6fg\" (UID: \"fff30902-ec45-47b2-a9ec-e984e1f2b240\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" Oct 
13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219809 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-config\") pod \"controller-manager-879f6c89f-2ws7t\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219838 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7tjp\" (UniqueName: \"kubernetes.io/projected/bf558883-5672-46d6-9d8a-a08070751a86-kube-api-access-s7tjp\") pod \"machine-api-operator-5694c8668f-87shb\" (UID: \"bf558883-5672-46d6-9d8a-a08070751a86\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219846 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-qlmbp"] Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219864 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7d71377c-822f-4d43-8d79-9d1e6ccdb340-encryption-config\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219889 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d7687923-06cc-47ec-98c9-5c7c9862d6a2-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rdpl6\" (UID: \"d7687923-06cc-47ec-98c9-5c7c9862d6a2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219939 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ce48c4a9-ae90-4159-935c-911dea34cac1-etcd-serving-ca\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219965 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aa90c071-3247-46ed-a635-b234d452ae89-client-ca\") pod \"route-controller-manager-6576b87f9c-c8ww6\" (UID: \"aa90c071-3247-46ed-a635-b234d452ae89\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219988 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d7687923-06cc-47ec-98c9-5c7c9862d6a2-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rdpl6\" (UID: \"d7687923-06cc-47ec-98c9-5c7c9862d6a2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.220014 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-927dd\" (UniqueName: \"kubernetes.io/projected/c809c1a6-f445-4257-aa0e-64e8e8e9484a-kube-api-access-927dd\") pod \"openshift-config-operator-7777fb866f-xqrpk\" (UID: \"c809c1a6-f445-4257-aa0e-64e8e8e9484a\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.219032 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-trusted-ca-bundle\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.220040 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ccfbd45c-9af2-4a8d-904e-da0f0816bc86-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-x95d5\" (UID: \"ccfbd45c-9af2-4a8d-904e-da0f0816bc86\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-x95d5" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.220064 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/fff30902-ec45-47b2-a9ec-e984e1f2b240-machine-approver-tls\") pod \"machine-approver-56656f9798-8g6fg\" (UID: \"fff30902-ec45-47b2-a9ec-e984e1f2b240\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.220088 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa90c071-3247-46ed-a635-b234d452ae89-config\") pod \"route-controller-manager-6576b87f9c-c8ww6\" (UID: \"aa90c071-3247-46ed-a635-b234d452ae89\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.220113 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pg2kw\" (UniqueName: \"kubernetes.io/projected/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-kube-api-access-pg2kw\") pod \"controller-manager-879f6c89f-2ws7t\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.220138 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce48c4a9-ae90-4159-935c-911dea34cac1-serving-cert\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.220161 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.220185 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wm5b\" (UniqueName: \"kubernetes.io/projected/ba1678a8-b5a0-491d-9531-a18c9500d4a3-kube-api-access-4wm5b\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.220208 4684 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b722e16a-9b60-4d7e-84ec-17f4e70cc3bb-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4lk4t\" (UID: \"b722e16a-9b60-4d7e-84ec-17f4e70cc3bb\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.220231 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mw79d\" (UniqueName: \"kubernetes.io/projected/6f6919bc-7475-4569-a9da-e72d185ed9a8-kube-api-access-mw79d\") pod \"migrator-59844c95c7-l52n6\" (UID: \"6f6919bc-7475-4569-a9da-e72d185ed9a8\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l52n6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.220253 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf558883-5672-46d6-9d8a-a08070751a86-config\") pod \"machine-api-operator-5694c8668f-87shb\" (UID: \"bf558883-5672-46d6-9d8a-a08070751a86\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.220276 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/cd8628a7-0787-48d4-a4fe-43323245b766-etcd-service-ca\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.220298 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwgjx\" (UniqueName: \"kubernetes.io/projected/cd8628a7-0787-48d4-a4fe-43323245b766-kube-api-access-gwgjx\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.220319 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b722e16a-9b60-4d7e-84ec-17f4e70cc3bb-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4lk4t\" (UID: \"b722e16a-9b60-4d7e-84ec-17f4e70cc3bb\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.220329 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7d71377c-822f-4d43-8d79-9d1e6ccdb340-audit-policies\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.220415 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ce48c4a9-ae90-4159-935c-911dea34cac1-audit-dir\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.221160 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/ce48c4a9-ae90-4159-935c-911dea34cac1-config\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.216737 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ba1678a8-b5a0-491d-9531-a18c9500d4a3-audit-dir\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.221981 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c0bec8b-2056-437f-aafe-dc4194b467df-config\") pod \"console-operator-58897d9998-fg2dj\" (UID: \"9c0bec8b-2056-437f-aafe-dc4194b467df\") " pod="openshift-console-operator/console-operator-58897d9998-fg2dj" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.222455 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c0bec8b-2056-437f-aafe-dc4194b467df-trusted-ca\") pod \"console-operator-58897d9998-fg2dj\" (UID: \"9c0bec8b-2056-437f-aafe-dc4194b467df\") " pod="openshift-console-operator/console-operator-58897d9998-fg2dj" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.222573 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7d71377c-822f-4d43-8d79-9d1e6ccdb340-audit-dir\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.223159 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-audit-policies\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.223897 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf558883-5672-46d6-9d8a-a08070751a86-config\") pod \"machine-api-operator-5694c8668f-87shb\" (UID: \"bf558883-5672-46d6-9d8a-a08070751a86\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.224021 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7d71377c-822f-4d43-8d79-9d1e6ccdb340-etcd-client\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.224391 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fff30902-ec45-47b2-a9ec-e984e1f2b240-config\") pod \"machine-approver-56656f9798-8g6fg\" (UID: \"fff30902-ec45-47b2-a9ec-e984e1f2b240\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.224565 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.224953 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd59bf2a-e617-424e-857c-2f7b94fbb743-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-l6c9m\" (UID: \"dd59bf2a-e617-424e-857c-2f7b94fbb743\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.225449 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ce48c4a9-ae90-4159-935c-911dea34cac1-encryption-config\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.225527 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/ce48c4a9-ae90-4159-935c-911dea34cac1-audit\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.226038 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa90c071-3247-46ed-a635-b234d452ae89-config\") pod \"route-controller-manager-6576b87f9c-c8ww6\" (UID: \"aa90c071-3247-46ed-a635-b234d452ae89\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.226150 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.226702 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-config\") pod \"controller-manager-879f6c89f-2ws7t\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.226875 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ce48c4a9-ae90-4159-935c-911dea34cac1-etcd-serving-ca\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.227120 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d7687923-06cc-47ec-98c9-5c7c9862d6a2-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rdpl6\" (UID: \"d7687923-06cc-47ec-98c9-5c7c9862d6a2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" Oct 13 13:09:46 crc kubenswrapper[4684]: 
I1013 13:09:46.227192 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-oauth-serving-cert\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.227330 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/ce48c4a9-ae90-4159-935c-911dea34cac1-node-pullsecrets\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.227374 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c0bec8b-2056-437f-aafe-dc4194b467df-serving-cert\") pod \"console-operator-58897d9998-fg2dj\" (UID: \"9c0bec8b-2056-437f-aafe-dc4194b467df\") " pod="openshift-console-operator/console-operator-58897d9998-fg2dj" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.227569 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd59bf2a-e617-424e-857c-2f7b94fbb743-service-ca-bundle\") pod \"authentication-operator-69f744f599-l6c9m\" (UID: \"dd59bf2a-e617-424e-857c-2f7b94fbb743\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.227633 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80e4e01f-7138-4a37-adab-30201bc0289d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-fzt79\" (UID: \"80e4e01f-7138-4a37-adab-30201bc0289d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.227729 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aa90c071-3247-46ed-a635-b234d452ae89-client-ca\") pod \"route-controller-manager-6576b87f9c-c8ww6\" (UID: \"aa90c071-3247-46ed-a635-b234d452ae89\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.228078 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ce48c4a9-ae90-4159-935c-911dea34cac1-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.228493 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bf558883-5672-46d6-9d8a-a08070751a86-images\") pod \"machine-api-operator-5694c8668f-87shb\" (UID: \"bf558883-5672-46d6-9d8a-a08070751a86\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.228762 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7d71377c-822f-4d43-8d79-9d1e6ccdb340-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.228790 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-serving-cert\") pod \"controller-manager-879f6c89f-2ws7t\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.228788 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7d71377c-822f-4d43-8d79-9d1e6ccdb340-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.228977 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-config\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.229044 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd59bf2a-e617-424e-857c-2f7b94fbb743-config\") pod \"authentication-operator-69f744f599-l6c9m\" (UID: \"dd59bf2a-e617-424e-857c-2f7b94fbb743\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.229138 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.229225 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-client-ca\") pod \"controller-manager-879f6c89f-2ws7t\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.229377 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf558883-5672-46d6-9d8a-a08070751a86-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-87shb\" (UID: \"bf558883-5672-46d6-9d8a-a08070751a86\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.229421 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/ce48c4a9-ae90-4159-935c-911dea34cac1-image-import-ca\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.229866 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: 
\"kubernetes.io/secret/fff30902-ec45-47b2-a9ec-e984e1f2b240-machine-approver-tls\") pod \"machine-approver-56656f9798-8g6fg\" (UID: \"fff30902-ec45-47b2-a9ec-e984e1f2b240\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.230555 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.230589 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c809c1a6-f445-4257-aa0e-64e8e8e9484a-serving-cert\") pod \"openshift-config-operator-7777fb866f-xqrpk\" (UID: \"c809c1a6-f445-4257-aa0e-64e8e8e9484a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.230699 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-oauth-config\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.230714 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80e4e01f-7138-4a37-adab-30201bc0289d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-fzt79\" (UID: \"80e4e01f-7138-4a37-adab-30201bc0289d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.231122 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.231329 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd59bf2a-e617-424e-857c-2f7b94fbb743-serving-cert\") pod \"authentication-operator-69f744f599-l6c9m\" (UID: \"dd59bf2a-e617-424e-857c-2f7b94fbb743\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.231956 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ce48c4a9-ae90-4159-935c-911dea34cac1-etcd-client\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.232226 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.232388 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/d7687923-06cc-47ec-98c9-5c7c9862d6a2-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rdpl6\" (UID: \"d7687923-06cc-47ec-98c9-5c7c9862d6a2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.232467 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.232483 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce48c4a9-ae90-4159-935c-911dea34cac1-serving-cert\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.232553 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ccfbd45c-9af2-4a8d-904e-da0f0816bc86-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-x95d5\" (UID: \"ccfbd45c-9af2-4a8d-904e-da0f0816bc86\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-x95d5" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.233127 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.232862 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d71377c-822f-4d43-8d79-9d1e6ccdb340-serving-cert\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.233181 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.233117 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.232794 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa90c071-3247-46ed-a635-b234d452ae89-serving-cert\") pod \"route-controller-manager-6576b87f9c-c8ww6\" (UID: \"aa90c071-3247-46ed-a635-b234d452ae89\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.233289 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-serving-cert\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.233579 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.243364 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7d71377c-822f-4d43-8d79-9d1e6ccdb340-encryption-config\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.248402 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.269149 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.289545 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.308935 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.321168 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd8628a7-0787-48d4-a4fe-43323245b766-config\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.321308 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/cd8628a7-0787-48d4-a4fe-43323245b766-etcd-ca\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.321457 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2fjl\" (UniqueName: \"kubernetes.io/projected/ba0003c9-5951-4b74-b146-38011315db63-kube-api-access-h2fjl\") pod \"downloads-7954f5f757-rzq52\" (UID: \"ba0003c9-5951-4b74-b146-38011315db63\") " pod="openshift-console/downloads-7954f5f757-rzq52" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.321606 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/0f479582-d10a-42c3-b8cc-46740db85fd1-config\") pod \"kube-apiserver-operator-766d6c64bb-xk62k\" (UID: \"0f479582-d10a-42c3-b8cc-46740db85fd1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.321706 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f479582-d10a-42c3-b8cc-46740db85fd1-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xk62k\" (UID: \"0f479582-d10a-42c3-b8cc-46740db85fd1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.321816 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn924\" (UniqueName: \"kubernetes.io/projected/fa43346f-4d23-4134-b1cb-a69926b2cc12-kube-api-access-rn924\") pod \"machine-config-controller-84d6567774-9m7v7\" (UID: \"fa43346f-4d23-4134-b1cb-a69926b2cc12\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.321939 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxq6v\" (UniqueName: \"kubernetes.io/projected/a7042677-5180-4f8b-9bde-40d53c672f5a-kube-api-access-sxq6v\") pod \"kube-storage-version-migrator-operator-b67b599dd-4bdxl\" (UID: \"a7042677-5180-4f8b-9bde-40d53c672f5a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.322074 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd8628a7-0787-48d4-a4fe-43323245b766-serving-cert\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.322197 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c139ab7-d464-4e3c-877e-5f41ae042c0e-config\") pod \"kube-controller-manager-operator-78b949d7b-8fv47\" (UID: \"5c139ab7-d464-4e3c-877e-5f41ae042c0e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.322291 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cd8628a7-0787-48d4-a4fe-43323245b766-etcd-client\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.322434 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b722e16a-9b60-4d7e-84ec-17f4e70cc3bb-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4lk4t\" (UID: \"b722e16a-9b60-4d7e-84ec-17f4e70cc3bb\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.321882 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/cd8628a7-0787-48d4-a4fe-43323245b766-config\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.322554 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mw79d\" (UniqueName: \"kubernetes.io/projected/6f6919bc-7475-4569-a9da-e72d185ed9a8-kube-api-access-mw79d\") pod \"migrator-59844c95c7-l52n6\" (UID: \"6f6919bc-7475-4569-a9da-e72d185ed9a8\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l52n6" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.322733 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/cd8628a7-0787-48d4-a4fe-43323245b766-etcd-service-ca\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.322829 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwgjx\" (UniqueName: \"kubernetes.io/projected/cd8628a7-0787-48d4-a4fe-43323245b766-kube-api-access-gwgjx\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.322978 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b722e16a-9b60-4d7e-84ec-17f4e70cc3bb-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4lk4t\" (UID: \"b722e16a-9b60-4d7e-84ec-17f4e70cc3bb\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.323095 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5c139ab7-d464-4e3c-877e-5f41ae042c0e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8fv47\" (UID: \"5c139ab7-d464-4e3c-877e-5f41ae042c0e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.323193 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fa43346f-4d23-4134-b1cb-a69926b2cc12-proxy-tls\") pod \"machine-config-controller-84d6567774-9m7v7\" (UID: \"fa43346f-4d23-4134-b1cb-a69926b2cc12\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.323012 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/cd8628a7-0787-48d4-a4fe-43323245b766-etcd-ca\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.323044 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f479582-d10a-42c3-b8cc-46740db85fd1-config\") pod \"kube-apiserver-operator-766d6c64bb-xk62k\" (UID: \"0f479582-d10a-42c3-b8cc-46740db85fd1\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.323298 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b722e16a-9b60-4d7e-84ec-17f4e70cc3bb-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4lk4t\" (UID: \"b722e16a-9b60-4d7e-84ec-17f4e70cc3bb\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.323527 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5c139ab7-d464-4e3c-877e-5f41ae042c0e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8fv47\" (UID: \"5c139ab7-d464-4e3c-877e-5f41ae042c0e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.323629 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7042677-5180-4f8b-9bde-40d53c672f5a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-4bdxl\" (UID: \"a7042677-5180-4f8b-9bde-40d53c672f5a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.323759 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0f479582-d10a-42c3-b8cc-46740db85fd1-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xk62k\" (UID: \"0f479582-d10a-42c3-b8cc-46740db85fd1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.323891 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fa43346f-4d23-4134-b1cb-a69926b2cc12-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9m7v7\" (UID: \"fa43346f-4d23-4134-b1cb-a69926b2cc12\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.324093 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7042677-5180-4f8b-9bde-40d53c672f5a-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-4bdxl\" (UID: \"a7042677-5180-4f8b-9bde-40d53c672f5a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.323560 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/cd8628a7-0787-48d4-a4fe-43323245b766-etcd-service-ca\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.324259 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b722e16a-9b60-4d7e-84ec-17f4e70cc3bb-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4lk4t\" (UID: 
\"b722e16a-9b60-4d7e-84ec-17f4e70cc3bb\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.324596 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f479582-d10a-42c3-b8cc-46740db85fd1-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xk62k\" (UID: \"0f479582-d10a-42c3-b8cc-46740db85fd1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.324799 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fa43346f-4d23-4134-b1cb-a69926b2cc12-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9m7v7\" (UID: \"fa43346f-4d23-4134-b1cb-a69926b2cc12\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.326127 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd8628a7-0787-48d4-a4fe-43323245b766-serving-cert\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.326279 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cd8628a7-0787-48d4-a4fe-43323245b766-etcd-client\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.326961 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b722e16a-9b60-4d7e-84ec-17f4e70cc3bb-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4lk4t\" (UID: \"b722e16a-9b60-4d7e-84ec-17f4e70cc3bb\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.329378 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.349207 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.369310 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.388877 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.408956 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.428828 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.449367 4684 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.457654 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fa43346f-4d23-4134-b1cb-a69926b2cc12-proxy-tls\") pod \"machine-config-controller-84d6567774-9m7v7\" (UID: \"fa43346f-4d23-4134-b1cb-a69926b2cc12\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.469074 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.489579 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.508412 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.518956 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7042677-5180-4f8b-9bde-40d53c672f5a-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-4bdxl\" (UID: \"a7042677-5180-4f8b-9bde-40d53c672f5a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.528811 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.534634 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7042677-5180-4f8b-9bde-40d53c672f5a-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-4bdxl\" (UID: \"a7042677-5180-4f8b-9bde-40d53c672f5a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.550244 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.589621 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.598742 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5c139ab7-d464-4e3c-877e-5f41ae042c0e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8fv47\" (UID: \"5c139ab7-d464-4e3c-877e-5f41ae042c0e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.609386 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.629729 4684 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.633370 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c139ab7-d464-4e3c-877e-5f41ae042c0e-config\") pod \"kube-controller-manager-operator-78b949d7b-8fv47\" (UID: \"5c139ab7-d464-4e3c-877e-5f41ae042c0e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.649161 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.669870 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.690407 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.710366 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.730231 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.749244 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.769316 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.790028 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.809868 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.830516 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.849288 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.868848 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.889765 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.909544 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.950360 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 
13:09:46.970019 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 13 13:09:46 crc kubenswrapper[4684]: I1013 13:09:46.989654 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.010307 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.029959 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.049388 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.070468 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.090974 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.110672 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.127274 4684 request.go:700] Waited for 1.003288287s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/secrets?fieldSelector=metadata.name%3Drouter-metrics-certs-default&limit=500&resourceVersion=0 Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.129515 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.149028 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.169098 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.189590 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.209749 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.230951 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.249739 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.270370 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.290021 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.318434 4684 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress-operator"/"trusted-ca" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.329453 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.349316 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.368389 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.391245 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.416381 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.430003 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.449059 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.470198 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.489333 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.509461 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.530017 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.549734 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.569846 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.589414 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.609113 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.630316 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.650319 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.669790 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.689850 4684 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-service-ca"/"signing-cabundle" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.709723 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.729602 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.749178 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.769448 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.789749 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.809705 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.830628 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.848759 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.869804 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.889570 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.908464 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.929921 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.949963 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.969601 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 13 13:09:47 crc kubenswrapper[4684]: I1013 13:09:47.989027 4684 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.009102 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.044856 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbs4n\" (UniqueName: \"kubernetes.io/projected/7d71377c-822f-4d43-8d79-9d1e6ccdb340-kube-api-access-rbs4n\") pod \"apiserver-7bbb656c7d-mwvrs\" (UID: \"7d71377c-822f-4d43-8d79-9d1e6ccdb340\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.064335 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-b4lct\" (UniqueName: \"kubernetes.io/projected/aa90c071-3247-46ed-a635-b234d452ae89-kube-api-access-b4lct\") pod \"route-controller-manager-6576b87f9c-c8ww6\" (UID: \"aa90c071-3247-46ed-a635-b234d452ae89\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.089916 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlsc2\" (UniqueName: \"kubernetes.io/projected/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-kube-api-access-rlsc2\") pod \"console-f9d7485db-8vg72\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") " pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.107685 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbkfj\" (UniqueName: \"kubernetes.io/projected/d7687923-06cc-47ec-98c9-5c7c9862d6a2-kube-api-access-wbkfj\") pod \"cluster-image-registry-operator-dc59b4c8b-rdpl6\" (UID: \"d7687923-06cc-47ec-98c9-5c7c9862d6a2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.127801 4684 request.go:700] Waited for 1.903747973s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/serviceaccounts/openshift-controller-manager-sa/token Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.131426 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.136170 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfnqx\" (UniqueName: \"kubernetes.io/projected/dd59bf2a-e617-424e-857c-2f7b94fbb743-kube-api-access-nfnqx\") pod \"authentication-operator-69f744f599-l6c9m\" (UID: \"dd59bf2a-e617-424e-857c-2f7b94fbb743\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.143941 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg2kw\" (UniqueName: \"kubernetes.io/projected/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-kube-api-access-pg2kw\") pod \"controller-manager-879f6c89f-2ws7t\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.166070 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-927dd\" (UniqueName: \"kubernetes.io/projected/c809c1a6-f445-4257-aa0e-64e8e8e9484a-kube-api-access-927dd\") pod \"openshift-config-operator-7777fb866f-xqrpk\" (UID: \"c809c1a6-f445-4257-aa0e-64e8e8e9484a\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.182131 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwzg4\" (UniqueName: \"kubernetes.io/projected/fff30902-ec45-47b2-a9ec-e984e1f2b240-kube-api-access-zwzg4\") pod \"machine-approver-56656f9798-8g6fg\" (UID: \"fff30902-ec45-47b2-a9ec-e984e1f2b240\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.202291 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-dh8dn\" (UniqueName: \"kubernetes.io/projected/ce48c4a9-ae90-4159-935c-911dea34cac1-kube-api-access-dh8dn\") pod \"apiserver-76f77b778f-8xvn2\" (UID: \"ce48c4a9-ae90-4159-935c-911dea34cac1\") " pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.212682 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.217215 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.225779 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wm5b\" (UniqueName: \"kubernetes.io/projected/ba1678a8-b5a0-491d-9531-a18c9500d4a3-kube-api-access-4wm5b\") pod \"oauth-openshift-558db77b4-kxhdp\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") " pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.231665 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.247646 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.252533 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fn6wp\" (UniqueName: \"kubernetes.io/projected/80e4e01f-7138-4a37-adab-30201bc0289d-kube-api-access-fn6wp\") pod \"openshift-apiserver-operator-796bbdcf4f-fzt79\" (UID: \"80e4e01f-7138-4a37-adab-30201bc0289d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.259741 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.266635 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d7687923-06cc-47ec-98c9-5c7c9862d6a2-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rdpl6\" (UID: \"d7687923-06cc-47ec-98c9-5c7c9862d6a2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.270974 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.282863 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.289788 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88qxv\" (UniqueName: \"kubernetes.io/projected/ccfbd45c-9af2-4a8d-904e-da0f0816bc86-kube-api-access-88qxv\") pod \"cluster-samples-operator-665b6dd947-x95d5\" (UID: \"ccfbd45c-9af2-4a8d-904e-da0f0816bc86\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-x95d5" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.306097 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7tjp\" (UniqueName: \"kubernetes.io/projected/bf558883-5672-46d6-9d8a-a08070751a86-kube-api-access-s7tjp\") pod \"machine-api-operator-5694c8668f-87shb\" (UID: \"bf558883-5672-46d6-9d8a-a08070751a86\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.333438 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwpv7\" (UniqueName: \"kubernetes.io/projected/9c0bec8b-2056-437f-aafe-dc4194b467df-kube-api-access-mwpv7\") pod \"console-operator-58897d9998-fg2dj\" (UID: \"9c0bec8b-2056-437f-aafe-dc4194b467df\") " pod="openshift-console-operator/console-operator-58897d9998-fg2dj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.352258 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2fjl\" (UniqueName: \"kubernetes.io/projected/ba0003c9-5951-4b74-b146-38011315db63-kube-api-access-h2fjl\") pod \"downloads-7954f5f757-rzq52\" (UID: \"ba0003c9-5951-4b74-b146-38011315db63\") " pod="openshift-console/downloads-7954f5f757-rzq52" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.366041 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.366502 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rn924\" (UniqueName: \"kubernetes.io/projected/fa43346f-4d23-4134-b1cb-a69926b2cc12-kube-api-access-rn924\") pod \"machine-config-controller-84d6567774-9m7v7\" (UID: \"fa43346f-4d23-4134-b1cb-a69926b2cc12\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.382375 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs"] Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.383556 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.391710 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b722e16a-9b60-4d7e-84ec-17f4e70cc3bb-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4lk4t\" (UID: \"b722e16a-9b60-4d7e-84ec-17f4e70cc3bb\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.399178 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.405763 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-fg2dj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.413980 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxq6v\" (UniqueName: \"kubernetes.io/projected/a7042677-5180-4f8b-9bde-40d53c672f5a-kube-api-access-sxq6v\") pod \"kube-storage-version-migrator-operator-b67b599dd-4bdxl\" (UID: \"a7042677-5180-4f8b-9bde-40d53c672f5a\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.425706 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mw79d\" (UniqueName: \"kubernetes.io/projected/6f6919bc-7475-4569-a9da-e72d185ed9a8-kube-api-access-mw79d\") pod \"migrator-59844c95c7-l52n6\" (UID: \"6f6919bc-7475-4569-a9da-e72d185ed9a8\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l52n6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.443264 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6"] Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.446136 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwgjx\" (UniqueName: \"kubernetes.io/projected/cd8628a7-0787-48d4-a4fe-43323245b766-kube-api-access-gwgjx\") pod \"etcd-operator-b45778765-pcpzg\" (UID: \"cd8628a7-0787-48d4-a4fe-43323245b766\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.470335 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5c139ab7-d464-4e3c-877e-5f41ae042c0e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8fv47\" (UID: \"5c139ab7-d464-4e3c-877e-5f41ae042c0e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.488394 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0f479582-d10a-42c3-b8cc-46740db85fd1-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xk62k\" (UID: \"0f479582-d10a-42c3-b8cc-46740db85fd1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.521083 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" Oct 13 13:09:48 crc kubenswrapper[4684]: W1013 13:09:48.527711 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa90c071_3247_46ed_a635_b234d452ae89.slice/crio-9e698b308dde4faf1f7e312625eb2fa324a460c564e01a94e839da52e4309201 WatchSource:0}: Error finding container 9e698b308dde4faf1f7e312625eb2fa324a460c564e01a94e839da52e4309201: Status 404 returned error can't find the container with id 9e698b308dde4faf1f7e312625eb2fa324a460c564e01a94e839da52e4309201 Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.555048 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvjg2\" (UniqueName: \"kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-kube-api-access-nvjg2\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.555087 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-registry-tls\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.555109 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e7af13a9-fc75-4bdb-931f-b68bb3813c09-registry-certificates\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.555149 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6db917b4-84de-4bd5-947f-5a2f7049f2ad-metrics-tls\") pod \"dns-operator-744455d44c-j679w\" (UID: \"6db917b4-84de-4bd5-947f-5a2f7049f2ad\") " pod="openshift-dns-operator/dns-operator-744455d44c-j679w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.555181 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e7af13a9-fc75-4bdb-931f-b68bb3813c09-installation-pull-secrets\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.555246 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.555271 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e7af13a9-fc75-4bdb-931f-b68bb3813c09-ca-trust-extracted\") 
pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.555288 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-bound-sa-token\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.555348 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7af13a9-fc75-4bdb-931f-b68bb3813c09-trusted-ca\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.555372 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnp5m\" (UniqueName: \"kubernetes.io/projected/6db917b4-84de-4bd5-947f-5a2f7049f2ad-kube-api-access-tnp5m\") pod \"dns-operator-744455d44c-j679w\" (UID: \"6db917b4-84de-4bd5-947f-5a2f7049f2ad\") " pod="openshift-dns-operator/dns-operator-744455d44c-j679w" Oct 13 13:09:48 crc kubenswrapper[4684]: E1013 13:09:48.557201 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:49.057188734 +0000 UTC m=+143.624572804 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.559384 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-l6c9m"] Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.563741 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-x95d5" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.589749 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-rzq52" Oct 13 13:09:48 crc kubenswrapper[4684]: W1013 13:09:48.593180 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd59bf2a_e617_424e_857c_2f7b94fbb743.slice/crio-abd0ac76cf15c964e421c8c6d4c4a65bf51923893f308f53ae3661e0eca578c8 WatchSource:0}: Error finding container abd0ac76cf15c964e421c8c6d4c4a65bf51923893f308f53ae3661e0eca578c8: Status 404 returned error can't find the container with id abd0ac76cf15c964e421c8c6d4c4a65bf51923893f308f53ae3661e0eca578c8 Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.596387 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.604142 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.610623 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l52n6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.615785 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.622671 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.657354 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:48 crc kubenswrapper[4684]: E1013 13:09:48.657754 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:49.157720144 +0000 UTC m=+143.725104224 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.659009 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pctl4\" (UniqueName: \"kubernetes.io/projected/efc8668a-031f-436e-838e-0e8f9675f125-kube-api-access-pctl4\") pod \"service-ca-9c57cc56f-s86bq\" (UID: \"efc8668a-031f-436e-838e-0e8f9675f125\") " pod="openshift-service-ca/service-ca-9c57cc56f-s86bq" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.659067 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8da81e59-aba2-4348-a96c-c1d1e5a4d7a0-tmpfs\") pod \"packageserver-d55dfcdfc-f8l8c\" (UID: \"8da81e59-aba2-4348-a96c-c1d1e5a4d7a0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.659094 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8da81e59-aba2-4348-a96c-c1d1e5a4d7a0-webhook-cert\") pod \"packageserver-d55dfcdfc-f8l8c\" (UID: \"8da81e59-aba2-4348-a96c-c1d1e5a4d7a0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 
13:09:48.659152 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e7af13a9-fc75-4bdb-931f-b68bb3813c09-registry-certificates\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.659196 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6db917b4-84de-4bd5-947f-5a2f7049f2ad-metrics-tls\") pod \"dns-operator-744455d44c-j679w\" (UID: \"6db917b4-84de-4bd5-947f-5a2f7049f2ad\") " pod="openshift-dns-operator/dns-operator-744455d44c-j679w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.659217 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8365d2be-3472-44d1-baf3-34b41918bbf5-serving-cert\") pod \"service-ca-operator-777779d784-7vnwr\" (UID: \"8365d2be-3472-44d1-baf3-34b41918bbf5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.659257 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/576d39b9-9426-45dc-a2c7-c2d886150998-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-csrbz\" (UID: \"576d39b9-9426-45dc-a2c7-c2d886150998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.659288 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/0962ce13-6826-4e10-9102-aaaa537b2f12-registration-dir\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.659309 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqmpj\" (UniqueName: \"kubernetes.io/projected/c6f0057b-5a7e-4810-a4a6-054ebd857da3-kube-api-access-qqmpj\") pod \"package-server-manager-789f6589d5-59crv\" (UID: \"c6f0057b-5a7e-4810-a4a6-054ebd857da3\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.659337 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1dd0712f-3d77-4805-ba06-d1665699b2b9-default-certificate\") pod \"router-default-5444994796-mzb8w\" (UID: \"1dd0712f-3d77-4805-ba06-d1665699b2b9\") " pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.659550 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-secret-volume\") pod \"collect-profiles-29339340-5jxft\" (UID: \"6a4e53b7-9ccc-4cf5-9359-7be4899e1311\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.661666 4684 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/0962ce13-6826-4e10-9102-aaaa537b2f12-socket-dir\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.661698 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m27pt\" (UniqueName: \"kubernetes.io/projected/8365d2be-3472-44d1-baf3-34b41918bbf5-kube-api-access-m27pt\") pod \"service-ca-operator-777779d784-7vnwr\" (UID: \"8365d2be-3472-44d1-baf3-34b41918bbf5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.662527 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrh7h\" (UniqueName: \"kubernetes.io/projected/b78d8d91-fce0-439e-80ee-0da9d10b4f73-kube-api-access-xrh7h\") pod \"catalog-operator-68c6474976-nfm22\" (UID: \"b78d8d91-fce0-439e-80ee-0da9d10b4f73\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.662862 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/88fc6942-d394-42da-a9a7-b0f7b9b60f5f-cert\") pod \"ingress-canary-qlmbp\" (UID: \"88fc6942-d394-42da-a9a7-b0f7b9b60f5f\") " pod="openshift-ingress-canary/ingress-canary-qlmbp" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.663320 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2698w\" (UniqueName: \"kubernetes.io/projected/576d39b9-9426-45dc-a2c7-c2d886150998-kube-api-access-2698w\") pod \"openshift-controller-manager-operator-756b6f6bc6-csrbz\" (UID: \"576d39b9-9426-45dc-a2c7-c2d886150998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.663358 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbkb6\" (UniqueName: \"kubernetes.io/projected/bfb1b95a-a7f4-4856-a7b2-88a97433960a-kube-api-access-fbkb6\") pod \"ingress-operator-5b745b69d9-8mzdj\" (UID: \"bfb1b95a-a7f4-4856-a7b2-88a97433960a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.663376 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/0962ce13-6826-4e10-9102-aaaa537b2f12-mountpoint-dir\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.663415 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1dd0712f-3d77-4805-ba06-d1665699b2b9-metrics-certs\") pod \"router-default-5444994796-mzb8w\" (UID: \"1dd0712f-3d77-4805-ba06-d1665699b2b9\") " pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.663463 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"stats-auth\" (UniqueName: \"kubernetes.io/secret/1dd0712f-3d77-4805-ba06-d1665699b2b9-stats-auth\") pod \"router-default-5444994796-mzb8w\" (UID: \"1dd0712f-3d77-4805-ba06-d1665699b2b9\") " pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.663620 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/0962ce13-6826-4e10-9102-aaaa537b2f12-plugins-dir\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.663674 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f65ec13e-6d29-4c66-82a1-576e6d2e51e9-auth-proxy-config\") pod \"machine-config-operator-74547568cd-p44qf\" (UID: \"f65ec13e-6d29-4c66-82a1-576e6d2e51e9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.665216 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e7af13a9-fc75-4bdb-931f-b68bb3813c09-registry-certificates\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.665850 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bfb1b95a-a7f4-4856-a7b2-88a97433960a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-8mzdj\" (UID: \"bfb1b95a-a7f4-4856-a7b2-88a97433960a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.665971 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2312455c-16d3-42ca-9cb0-677f6fa74c41-certs\") pod \"machine-config-server-5qbxw\" (UID: \"2312455c-16d3-42ca-9cb0-677f6fa74c41\") " pod="openshift-machine-config-operator/machine-config-server-5qbxw" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.666000 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/576d39b9-9426-45dc-a2c7-c2d886150998-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-csrbz\" (UID: \"576d39b9-9426-45dc-a2c7-c2d886150998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.666027 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-config-volume\") pod \"collect-profiles-29339340-5jxft\" (UID: \"6a4e53b7-9ccc-4cf5-9359-7be4899e1311\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.666078 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: 
\"kubernetes.io/secret/efc8668a-031f-436e-838e-0e8f9675f125-signing-key\") pod \"service-ca-9c57cc56f-s86bq\" (UID: \"efc8668a-031f-436e-838e-0e8f9675f125\") " pod="openshift-service-ca/service-ca-9c57cc56f-s86bq" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.666123 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd-profile-collector-cert\") pod \"olm-operator-6b444d44fb-fmj2r\" (UID: \"4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.666317 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7af13a9-fc75-4bdb-931f-b68bb3813c09-trusted-ca\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.666379 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kv9gj\" (UniqueName: \"kubernetes.io/projected/804d920e-23a2-4456-807c-31ebedd7ae0c-kube-api-access-kv9gj\") pod \"multus-admission-controller-857f4d67dd-bp24t\" (UID: \"804d920e-23a2-4456-807c-31ebedd7ae0c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-bp24t" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.700338 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.700603 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6db917b4-84de-4bd5-947f-5a2f7049f2ad-metrics-tls\") pod \"dns-operator-744455d44c-j679w\" (UID: \"6db917b4-84de-4bd5-947f-5a2f7049f2ad\") " pod="openshift-dns-operator/dns-operator-744455d44c-j679w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.709322 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.710200 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnp5m\" (UniqueName: \"kubernetes.io/projected/6db917b4-84de-4bd5-947f-5a2f7049f2ad-kube-api-access-tnp5m\") pod \"dns-operator-744455d44c-j679w\" (UID: \"6db917b4-84de-4bd5-947f-5a2f7049f2ad\") " pod="openshift-dns-operator/dns-operator-744455d44c-j679w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.710654 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2312455c-16d3-42ca-9cb0-677f6fa74c41-node-bootstrap-token\") pod \"machine-config-server-5qbxw\" (UID: \"2312455c-16d3-42ca-9cb0-677f6fa74c41\") " pod="openshift-machine-config-operator/machine-config-server-5qbxw" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.710795 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvjg2\" (UniqueName: \"kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-kube-api-access-nvjg2\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.710851 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwr9p\" (UniqueName: \"kubernetes.io/projected/de6c8979-1887-4e08-9439-f6654ced778f-kube-api-access-vwr9p\") pod \"control-plane-machine-set-operator-78cbb6b69f-ptz8b\" (UID: \"de6c8979-1887-4e08-9439-f6654ced778f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ptz8b" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.711072 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-registry-tls\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.711112 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/313cf642-fd7b-4e6a-b46a-caa3c76b340d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tg4h7\" (UID: \"313cf642-fd7b-4e6a-b46a-caa3c76b340d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.711331 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/804d920e-23a2-4456-807c-31ebedd7ae0c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-bp24t\" (UID: \"804d920e-23a2-4456-807c-31ebedd7ae0c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-bp24t" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.711426 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8da81e59-aba2-4348-a96c-c1d1e5a4d7a0-apiservice-cert\") pod \"packageserver-d55dfcdfc-f8l8c\" (UID: \"8da81e59-aba2-4348-a96c-c1d1e5a4d7a0\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.711535 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e7af13a9-fc75-4bdb-931f-b68bb3813c09-installation-pull-secrets\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.711563 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbmjg\" (UniqueName: \"kubernetes.io/projected/f65ec13e-6d29-4c66-82a1-576e6d2e51e9-kube-api-access-xbmjg\") pod \"machine-config-operator-74547568cd-p44qf\" (UID: \"f65ec13e-6d29-4c66-82a1-576e6d2e51e9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.711618 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9gnz\" (UniqueName: \"kubernetes.io/projected/4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd-kube-api-access-x9gnz\") pod \"olm-operator-6b444d44fb-fmj2r\" (UID: \"4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.711706 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f65ec13e-6d29-4c66-82a1-576e6d2e51e9-images\") pod \"machine-config-operator-74547568cd-p44qf\" (UID: \"f65ec13e-6d29-4c66-82a1-576e6d2e51e9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.711742 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/313cf642-fd7b-4e6a-b46a-caa3c76b340d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tg4h7\" (UID: \"313cf642-fd7b-4e6a-b46a-caa3c76b340d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.712419 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f65ec13e-6d29-4c66-82a1-576e6d2e51e9-proxy-tls\") pod \"machine-config-operator-74547568cd-p44qf\" (UID: \"f65ec13e-6d29-4c66-82a1-576e6d2e51e9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.712496 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b78d8d91-fce0-439e-80ee-0da9d10b4f73-profile-collector-cert\") pod \"catalog-operator-68c6474976-nfm22\" (UID: \"b78d8d91-fce0-439e-80ee-0da9d10b4f73\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.712840 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wckxf\" (UniqueName: \"kubernetes.io/projected/1dd0712f-3d77-4805-ba06-d1665699b2b9-kube-api-access-wckxf\") pod \"router-default-5444994796-mzb8w\" 
(UID: \"1dd0712f-3d77-4805-ba06-d1665699b2b9\") " pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.712888 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bfb1b95a-a7f4-4856-a7b2-88a97433960a-trusted-ca\") pod \"ingress-operator-5b745b69d9-8mzdj\" (UID: \"bfb1b95a-a7f4-4856-a7b2-88a97433960a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.713692 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: E1013 13:09:48.714149 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:49.21413091 +0000 UTC m=+143.781514980 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.714193 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e7af13a9-fc75-4bdb-931f-b68bb3813c09-ca-trust-extracted\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.714311 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67zxl\" (UniqueName: \"kubernetes.io/projected/88fc6942-d394-42da-a9a7-b0f7b9b60f5f-kube-api-access-67zxl\") pod \"ingress-canary-qlmbp\" (UID: \"88fc6942-d394-42da-a9a7-b0f7b9b60f5f\") " pod="openshift-ingress-canary/ingress-canary-qlmbp" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.714641 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e7af13a9-fc75-4bdb-931f-b68bb3813c09-ca-trust-extracted\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.715280 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kn7r\" (UniqueName: \"kubernetes.io/projected/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-kube-api-access-9kn7r\") pod \"collect-profiles-29339340-5jxft\" (UID: \"6a4e53b7-9ccc-4cf5-9359-7be4899e1311\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 
13:09:48.715318 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-bound-sa-token\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.715504 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7f75b051-e257-44c4-9cf8-b7fadf79176c-config-volume\") pod \"dns-default-lk6sg\" (UID: \"7f75b051-e257-44c4-9cf8-b7fadf79176c\") " pod="openshift-dns/dns-default-lk6sg" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.715670 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7af13a9-fc75-4bdb-931f-b68bb3813c09-trusted-ca\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.715753 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4znr\" (UniqueName: \"kubernetes.io/projected/7f75b051-e257-44c4-9cf8-b7fadf79176c-kube-api-access-p4znr\") pod \"dns-default-lk6sg\" (UID: \"7f75b051-e257-44c4-9cf8-b7fadf79176c\") " pod="openshift-dns/dns-default-lk6sg" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.715834 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/de6c8979-1887-4e08-9439-f6654ced778f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-ptz8b\" (UID: \"de6c8979-1887-4e08-9439-f6654ced778f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ptz8b" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.715861 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd-srv-cert\") pod \"olm-operator-6b444d44fb-fmj2r\" (UID: \"4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.715952 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b78d8d91-fce0-439e-80ee-0da9d10b4f73-srv-cert\") pod \"catalog-operator-68c6474976-nfm22\" (UID: \"b78d8d91-fce0-439e-80ee-0da9d10b4f73\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.715989 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trtrl\" (UniqueName: \"kubernetes.io/projected/8da81e59-aba2-4348-a96c-c1d1e5a4d7a0-kube-api-access-trtrl\") pod \"packageserver-d55dfcdfc-f8l8c\" (UID: \"8da81e59-aba2-4348-a96c-c1d1e5a4d7a0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.716044 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/8365d2be-3472-44d1-baf3-34b41918bbf5-config\") pod \"service-ca-operator-777779d784-7vnwr\" (UID: \"8365d2be-3472-44d1-baf3-34b41918bbf5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.716261 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7f75b051-e257-44c4-9cf8-b7fadf79176c-metrics-tls\") pod \"dns-default-lk6sg\" (UID: \"7f75b051-e257-44c4-9cf8-b7fadf79176c\") " pod="openshift-dns/dns-default-lk6sg" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.716391 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bfb1b95a-a7f4-4856-a7b2-88a97433960a-metrics-tls\") pod \"ingress-operator-5b745b69d9-8mzdj\" (UID: \"bfb1b95a-a7f4-4856-a7b2-88a97433960a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.716433 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/efc8668a-031f-436e-838e-0e8f9675f125-signing-cabundle\") pod \"service-ca-9c57cc56f-s86bq\" (UID: \"efc8668a-031f-436e-838e-0e8f9675f125\") " pod="openshift-service-ca/service-ca-9c57cc56f-s86bq" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.716614 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1dd0712f-3d77-4805-ba06-d1665699b2b9-service-ca-bundle\") pod \"router-default-5444994796-mzb8w\" (UID: \"1dd0712f-3d77-4805-ba06-d1665699b2b9\") " pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.720418 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4dhh\" (UniqueName: \"kubernetes.io/projected/313cf642-fd7b-4e6a-b46a-caa3c76b340d-kube-api-access-b4dhh\") pod \"marketplace-operator-79b997595-tg4h7\" (UID: \"313cf642-fd7b-4e6a-b46a-caa3c76b340d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.720474 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/0962ce13-6826-4e10-9102-aaaa537b2f12-csi-data-dir\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.720523 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c6f0057b-5a7e-4810-a4a6-054ebd857da3-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-59crv\" (UID: \"c6f0057b-5a7e-4810-a4a6-054ebd857da3\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.720563 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxk2h\" (UniqueName: \"kubernetes.io/projected/2312455c-16d3-42ca-9cb0-677f6fa74c41-kube-api-access-gxk2h\") pod 
\"machine-config-server-5qbxw\" (UID: \"2312455c-16d3-42ca-9cb0-677f6fa74c41\") " pod="openshift-machine-config-operator/machine-config-server-5qbxw" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.720654 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fx626\" (UniqueName: \"kubernetes.io/projected/0962ce13-6826-4e10-9102-aaaa537b2f12-kube-api-access-fx626\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.732775 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e7af13a9-fc75-4bdb-931f-b68bb3813c09-installation-pull-secrets\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.734999 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-registry-tls\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.786736 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnp5m\" (UniqueName: \"kubernetes.io/projected/6db917b4-84de-4bd5-947f-5a2f7049f2ad-kube-api-access-tnp5m\") pod \"dns-operator-744455d44c-j679w\" (UID: \"6db917b4-84de-4bd5-947f-5a2f7049f2ad\") " pod="openshift-dns-operator/dns-operator-744455d44c-j679w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.796532 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-87shb"] Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.801954 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-bound-sa-token\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.808685 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvjg2\" (UniqueName: \"kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-kube-api-access-nvjg2\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822346 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822571 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2312455c-16d3-42ca-9cb0-677f6fa74c41-node-bootstrap-token\") pod \"machine-config-server-5qbxw\" (UID: \"2312455c-16d3-42ca-9cb0-677f6fa74c41\") " 
pod="openshift-machine-config-operator/machine-config-server-5qbxw" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822594 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwr9p\" (UniqueName: \"kubernetes.io/projected/de6c8979-1887-4e08-9439-f6654ced778f-kube-api-access-vwr9p\") pod \"control-plane-machine-set-operator-78cbb6b69f-ptz8b\" (UID: \"de6c8979-1887-4e08-9439-f6654ced778f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ptz8b" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822623 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/313cf642-fd7b-4e6a-b46a-caa3c76b340d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tg4h7\" (UID: \"313cf642-fd7b-4e6a-b46a-caa3c76b340d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822639 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/804d920e-23a2-4456-807c-31ebedd7ae0c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-bp24t\" (UID: \"804d920e-23a2-4456-807c-31ebedd7ae0c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-bp24t" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822667 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8da81e59-aba2-4348-a96c-c1d1e5a4d7a0-apiservice-cert\") pod \"packageserver-d55dfcdfc-f8l8c\" (UID: \"8da81e59-aba2-4348-a96c-c1d1e5a4d7a0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" Oct 13 13:09:48 crc kubenswrapper[4684]: E1013 13:09:48.822677 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:49.322655159 +0000 UTC m=+143.890039229 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822709 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbmjg\" (UniqueName: \"kubernetes.io/projected/f65ec13e-6d29-4c66-82a1-576e6d2e51e9-kube-api-access-xbmjg\") pod \"machine-config-operator-74547568cd-p44qf\" (UID: \"f65ec13e-6d29-4c66-82a1-576e6d2e51e9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822741 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9gnz\" (UniqueName: \"kubernetes.io/projected/4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd-kube-api-access-x9gnz\") pod \"olm-operator-6b444d44fb-fmj2r\" (UID: \"4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822764 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f65ec13e-6d29-4c66-82a1-576e6d2e51e9-images\") pod \"machine-config-operator-74547568cd-p44qf\" (UID: \"f65ec13e-6d29-4c66-82a1-576e6d2e51e9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822781 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/313cf642-fd7b-4e6a-b46a-caa3c76b340d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tg4h7\" (UID: \"313cf642-fd7b-4e6a-b46a-caa3c76b340d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822802 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f65ec13e-6d29-4c66-82a1-576e6d2e51e9-proxy-tls\") pod \"machine-config-operator-74547568cd-p44qf\" (UID: \"f65ec13e-6d29-4c66-82a1-576e6d2e51e9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822818 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b78d8d91-fce0-439e-80ee-0da9d10b4f73-profile-collector-cert\") pod \"catalog-operator-68c6474976-nfm22\" (UID: \"b78d8d91-fce0-439e-80ee-0da9d10b4f73\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822835 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bfb1b95a-a7f4-4856-a7b2-88a97433960a-trusted-ca\") pod \"ingress-operator-5b745b69d9-8mzdj\" (UID: \"bfb1b95a-a7f4-4856-a7b2-88a97433960a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822850 4684 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wckxf\" (UniqueName: \"kubernetes.io/projected/1dd0712f-3d77-4805-ba06-d1665699b2b9-kube-api-access-wckxf\") pod \"router-default-5444994796-mzb8w\" (UID: \"1dd0712f-3d77-4805-ba06-d1665699b2b9\") " pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822869 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67zxl\" (UniqueName: \"kubernetes.io/projected/88fc6942-d394-42da-a9a7-b0f7b9b60f5f-kube-api-access-67zxl\") pod \"ingress-canary-qlmbp\" (UID: \"88fc6942-d394-42da-a9a7-b0f7b9b60f5f\") " pod="openshift-ingress-canary/ingress-canary-qlmbp" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822893 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822963 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kn7r\" (UniqueName: \"kubernetes.io/projected/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-kube-api-access-9kn7r\") pod \"collect-profiles-29339340-5jxft\" (UID: \"6a4e53b7-9ccc-4cf5-9359-7be4899e1311\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.822982 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7f75b051-e257-44c4-9cf8-b7fadf79176c-config-volume\") pod \"dns-default-lk6sg\" (UID: \"7f75b051-e257-44c4-9cf8-b7fadf79176c\") " pod="openshift-dns/dns-default-lk6sg" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823000 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4znr\" (UniqueName: \"kubernetes.io/projected/7f75b051-e257-44c4-9cf8-b7fadf79176c-kube-api-access-p4znr\") pod \"dns-default-lk6sg\" (UID: \"7f75b051-e257-44c4-9cf8-b7fadf79176c\") " pod="openshift-dns/dns-default-lk6sg" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823019 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/de6c8979-1887-4e08-9439-f6654ced778f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-ptz8b\" (UID: \"de6c8979-1887-4e08-9439-f6654ced778f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ptz8b" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823036 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd-srv-cert\") pod \"olm-operator-6b444d44fb-fmj2r\" (UID: \"4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823052 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trtrl\" (UniqueName: 
\"kubernetes.io/projected/8da81e59-aba2-4348-a96c-c1d1e5a4d7a0-kube-api-access-trtrl\") pod \"packageserver-d55dfcdfc-f8l8c\" (UID: \"8da81e59-aba2-4348-a96c-c1d1e5a4d7a0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823066 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b78d8d91-fce0-439e-80ee-0da9d10b4f73-srv-cert\") pod \"catalog-operator-68c6474976-nfm22\" (UID: \"b78d8d91-fce0-439e-80ee-0da9d10b4f73\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823085 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8365d2be-3472-44d1-baf3-34b41918bbf5-config\") pod \"service-ca-operator-777779d784-7vnwr\" (UID: \"8365d2be-3472-44d1-baf3-34b41918bbf5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823101 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7f75b051-e257-44c4-9cf8-b7fadf79176c-metrics-tls\") pod \"dns-default-lk6sg\" (UID: \"7f75b051-e257-44c4-9cf8-b7fadf79176c\") " pod="openshift-dns/dns-default-lk6sg" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823115 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bfb1b95a-a7f4-4856-a7b2-88a97433960a-metrics-tls\") pod \"ingress-operator-5b745b69d9-8mzdj\" (UID: \"bfb1b95a-a7f4-4856-a7b2-88a97433960a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823130 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/efc8668a-031f-436e-838e-0e8f9675f125-signing-cabundle\") pod \"service-ca-9c57cc56f-s86bq\" (UID: \"efc8668a-031f-436e-838e-0e8f9675f125\") " pod="openshift-service-ca/service-ca-9c57cc56f-s86bq" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823158 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1dd0712f-3d77-4805-ba06-d1665699b2b9-service-ca-bundle\") pod \"router-default-5444994796-mzb8w\" (UID: \"1dd0712f-3d77-4805-ba06-d1665699b2b9\") " pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823175 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4dhh\" (UniqueName: \"kubernetes.io/projected/313cf642-fd7b-4e6a-b46a-caa3c76b340d-kube-api-access-b4dhh\") pod \"marketplace-operator-79b997595-tg4h7\" (UID: \"313cf642-fd7b-4e6a-b46a-caa3c76b340d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823201 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/0962ce13-6826-4e10-9102-aaaa537b2f12-csi-data-dir\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823220 4684 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxk2h\" (UniqueName: \"kubernetes.io/projected/2312455c-16d3-42ca-9cb0-677f6fa74c41-kube-api-access-gxk2h\") pod \"machine-config-server-5qbxw\" (UID: \"2312455c-16d3-42ca-9cb0-677f6fa74c41\") " pod="openshift-machine-config-operator/machine-config-server-5qbxw" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823242 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c6f0057b-5a7e-4810-a4a6-054ebd857da3-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-59crv\" (UID: \"c6f0057b-5a7e-4810-a4a6-054ebd857da3\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823260 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fx626\" (UniqueName: \"kubernetes.io/projected/0962ce13-6826-4e10-9102-aaaa537b2f12-kube-api-access-fx626\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823277 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pctl4\" (UniqueName: \"kubernetes.io/projected/efc8668a-031f-436e-838e-0e8f9675f125-kube-api-access-pctl4\") pod \"service-ca-9c57cc56f-s86bq\" (UID: \"efc8668a-031f-436e-838e-0e8f9675f125\") " pod="openshift-service-ca/service-ca-9c57cc56f-s86bq" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823290 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8da81e59-aba2-4348-a96c-c1d1e5a4d7a0-tmpfs\") pod \"packageserver-d55dfcdfc-f8l8c\" (UID: \"8da81e59-aba2-4348-a96c-c1d1e5a4d7a0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823304 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8da81e59-aba2-4348-a96c-c1d1e5a4d7a0-webhook-cert\") pod \"packageserver-d55dfcdfc-f8l8c\" (UID: \"8da81e59-aba2-4348-a96c-c1d1e5a4d7a0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823327 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8365d2be-3472-44d1-baf3-34b41918bbf5-serving-cert\") pod \"service-ca-operator-777779d784-7vnwr\" (UID: \"8365d2be-3472-44d1-baf3-34b41918bbf5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823352 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/576d39b9-9426-45dc-a2c7-c2d886150998-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-csrbz\" (UID: \"576d39b9-9426-45dc-a2c7-c2d886150998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823368 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: 
\"kubernetes.io/host-path/0962ce13-6826-4e10-9102-aaaa537b2f12-registration-dir\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823384 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqmpj\" (UniqueName: \"kubernetes.io/projected/c6f0057b-5a7e-4810-a4a6-054ebd857da3-kube-api-access-qqmpj\") pod \"package-server-manager-789f6589d5-59crv\" (UID: \"c6f0057b-5a7e-4810-a4a6-054ebd857da3\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823402 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1dd0712f-3d77-4805-ba06-d1665699b2b9-default-certificate\") pod \"router-default-5444994796-mzb8w\" (UID: \"1dd0712f-3d77-4805-ba06-d1665699b2b9\") " pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823417 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-secret-volume\") pod \"collect-profiles-29339340-5jxft\" (UID: \"6a4e53b7-9ccc-4cf5-9359-7be4899e1311\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823436 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m27pt\" (UniqueName: \"kubernetes.io/projected/8365d2be-3472-44d1-baf3-34b41918bbf5-kube-api-access-m27pt\") pod \"service-ca-operator-777779d784-7vnwr\" (UID: \"8365d2be-3472-44d1-baf3-34b41918bbf5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823455 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/0962ce13-6826-4e10-9102-aaaa537b2f12-socket-dir\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823475 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrh7h\" (UniqueName: \"kubernetes.io/projected/b78d8d91-fce0-439e-80ee-0da9d10b4f73-kube-api-access-xrh7h\") pod \"catalog-operator-68c6474976-nfm22\" (UID: \"b78d8d91-fce0-439e-80ee-0da9d10b4f73\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823492 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/88fc6942-d394-42da-a9a7-b0f7b9b60f5f-cert\") pod \"ingress-canary-qlmbp\" (UID: \"88fc6942-d394-42da-a9a7-b0f7b9b60f5f\") " pod="openshift-ingress-canary/ingress-canary-qlmbp" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823514 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbkb6\" (UniqueName: \"kubernetes.io/projected/bfb1b95a-a7f4-4856-a7b2-88a97433960a-kube-api-access-fbkb6\") pod \"ingress-operator-5b745b69d9-8mzdj\" (UID: \"bfb1b95a-a7f4-4856-a7b2-88a97433960a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" 
Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823529 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/0962ce13-6826-4e10-9102-aaaa537b2f12-mountpoint-dir\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823546 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2698w\" (UniqueName: \"kubernetes.io/projected/576d39b9-9426-45dc-a2c7-c2d886150998-kube-api-access-2698w\") pod \"openshift-controller-manager-operator-756b6f6bc6-csrbz\" (UID: \"576d39b9-9426-45dc-a2c7-c2d886150998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823565 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1dd0712f-3d77-4805-ba06-d1665699b2b9-metrics-certs\") pod \"router-default-5444994796-mzb8w\" (UID: \"1dd0712f-3d77-4805-ba06-d1665699b2b9\") " pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823582 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1dd0712f-3d77-4805-ba06-d1665699b2b9-stats-auth\") pod \"router-default-5444994796-mzb8w\" (UID: \"1dd0712f-3d77-4805-ba06-d1665699b2b9\") " pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823601 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/0962ce13-6826-4e10-9102-aaaa537b2f12-plugins-dir\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823620 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f65ec13e-6d29-4c66-82a1-576e6d2e51e9-auth-proxy-config\") pod \"machine-config-operator-74547568cd-p44qf\" (UID: \"f65ec13e-6d29-4c66-82a1-576e6d2e51e9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823647 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bfb1b95a-a7f4-4856-a7b2-88a97433960a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-8mzdj\" (UID: \"bfb1b95a-a7f4-4856-a7b2-88a97433960a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823662 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-config-volume\") pod \"collect-profiles-29339340-5jxft\" (UID: \"6a4e53b7-9ccc-4cf5-9359-7be4899e1311\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823685 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: 
\"kubernetes.io/secret/2312455c-16d3-42ca-9cb0-677f6fa74c41-certs\") pod \"machine-config-server-5qbxw\" (UID: \"2312455c-16d3-42ca-9cb0-677f6fa74c41\") " pod="openshift-machine-config-operator/machine-config-server-5qbxw" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823699 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/576d39b9-9426-45dc-a2c7-c2d886150998-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-csrbz\" (UID: \"576d39b9-9426-45dc-a2c7-c2d886150998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823714 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/efc8668a-031f-436e-838e-0e8f9675f125-signing-key\") pod \"service-ca-9c57cc56f-s86bq\" (UID: \"efc8668a-031f-436e-838e-0e8f9675f125\") " pod="openshift-service-ca/service-ca-9c57cc56f-s86bq" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823730 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd-profile-collector-cert\") pod \"olm-operator-6b444d44fb-fmj2r\" (UID: \"4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.823751 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kv9gj\" (UniqueName: \"kubernetes.io/projected/804d920e-23a2-4456-807c-31ebedd7ae0c-kube-api-access-kv9gj\") pod \"multus-admission-controller-857f4d67dd-bp24t\" (UID: \"804d920e-23a2-4456-807c-31ebedd7ae0c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-bp24t" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.824005 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/0962ce13-6826-4e10-9102-aaaa537b2f12-csi-data-dir\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.825180 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f65ec13e-6d29-4c66-82a1-576e6d2e51e9-images\") pod \"machine-config-operator-74547568cd-p44qf\" (UID: \"f65ec13e-6d29-4c66-82a1-576e6d2e51e9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.831419 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/efc8668a-031f-436e-838e-0e8f9675f125-signing-cabundle\") pod \"service-ca-9c57cc56f-s86bq\" (UID: \"efc8668a-031f-436e-838e-0e8f9675f125\") " pod="openshift-service-ca/service-ca-9c57cc56f-s86bq" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.835195 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f65ec13e-6d29-4c66-82a1-576e6d2e51e9-auth-proxy-config\") pod \"machine-config-operator-74547568cd-p44qf\" (UID: \"f65ec13e-6d29-4c66-82a1-576e6d2e51e9\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.835373 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/0962ce13-6826-4e10-9102-aaaa537b2f12-mountpoint-dir\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.836836 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79"] Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.836882 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-8vg72"] Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.847861 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-config-volume\") pod \"collect-profiles-29339340-5jxft\" (UID: \"6a4e53b7-9ccc-4cf5-9359-7be4899e1311\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.848409 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8da81e59-aba2-4348-a96c-c1d1e5a4d7a0-tmpfs\") pod \"packageserver-d55dfcdfc-f8l8c\" (UID: \"8da81e59-aba2-4348-a96c-c1d1e5a4d7a0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.849628 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bfb1b95a-a7f4-4856-a7b2-88a97433960a-metrics-tls\") pod \"ingress-operator-5b745b69d9-8mzdj\" (UID: \"bfb1b95a-a7f4-4856-a7b2-88a97433960a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" Oct 13 13:09:48 crc kubenswrapper[4684]: E1013 13:09:48.850383 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:49.35027239 +0000 UTC m=+143.917656460 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.852279 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7f75b051-e257-44c4-9cf8-b7fadf79176c-config-volume\") pod \"dns-default-lk6sg\" (UID: \"7f75b051-e257-44c4-9cf8-b7fadf79176c\") " pod="openshift-dns/dns-default-lk6sg" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.854068 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/313cf642-fd7b-4e6a-b46a-caa3c76b340d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tg4h7\" (UID: \"313cf642-fd7b-4e6a-b46a-caa3c76b340d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.854623 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8da81e59-aba2-4348-a96c-c1d1e5a4d7a0-apiservice-cert\") pod \"packageserver-d55dfcdfc-f8l8c\" (UID: \"8da81e59-aba2-4348-a96c-c1d1e5a4d7a0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.855258 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/313cf642-fd7b-4e6a-b46a-caa3c76b340d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tg4h7\" (UID: \"313cf642-fd7b-4e6a-b46a-caa3c76b340d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.856617 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8365d2be-3472-44d1-baf3-34b41918bbf5-config\") pod \"service-ca-operator-777779d784-7vnwr\" (UID: \"8365d2be-3472-44d1-baf3-34b41918bbf5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.857169 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/de6c8979-1887-4e08-9439-f6654ced778f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-ptz8b\" (UID: \"de6c8979-1887-4e08-9439-f6654ced778f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ptz8b" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.856198 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bfb1b95a-a7f4-4856-a7b2-88a97433960a-trusted-ca\") pod \"ingress-operator-5b745b69d9-8mzdj\" (UID: \"bfb1b95a-a7f4-4856-a7b2-88a97433960a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.857697 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/f65ec13e-6d29-4c66-82a1-576e6d2e51e9-proxy-tls\") pod \"machine-config-operator-74547568cd-p44qf\" (UID: \"f65ec13e-6d29-4c66-82a1-576e6d2e51e9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.857696 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b78d8d91-fce0-439e-80ee-0da9d10b4f73-srv-cert\") pod \"catalog-operator-68c6474976-nfm22\" (UID: \"b78d8d91-fce0-439e-80ee-0da9d10b4f73\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.858550 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kv9gj\" (UniqueName: \"kubernetes.io/projected/804d920e-23a2-4456-807c-31ebedd7ae0c-kube-api-access-kv9gj\") pod \"multus-admission-controller-857f4d67dd-bp24t\" (UID: \"804d920e-23a2-4456-807c-31ebedd7ae0c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-bp24t" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.860436 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/0962ce13-6826-4e10-9102-aaaa537b2f12-socket-dir\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.860894 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/576d39b9-9426-45dc-a2c7-c2d886150998-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-csrbz\" (UID: \"576d39b9-9426-45dc-a2c7-c2d886150998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.861007 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/0962ce13-6826-4e10-9102-aaaa537b2f12-plugins-dir\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.861008 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/0962ce13-6826-4e10-9102-aaaa537b2f12-registration-dir\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.861848 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c6f0057b-5a7e-4810-a4a6-054ebd857da3-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-59crv\" (UID: \"c6f0057b-5a7e-4810-a4a6-054ebd857da3\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.862651 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1dd0712f-3d77-4805-ba06-d1665699b2b9-service-ca-bundle\") pod \"router-default-5444994796-mzb8w\" (UID: \"1dd0712f-3d77-4805-ba06-d1665699b2b9\") " pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 
13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.862839 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7f75b051-e257-44c4-9cf8-b7fadf79176c-metrics-tls\") pod \"dns-default-lk6sg\" (UID: \"7f75b051-e257-44c4-9cf8-b7fadf79176c\") " pod="openshift-dns/dns-default-lk6sg" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.864359 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/88fc6942-d394-42da-a9a7-b0f7b9b60f5f-cert\") pod \"ingress-canary-qlmbp\" (UID: \"88fc6942-d394-42da-a9a7-b0f7b9b60f5f\") " pod="openshift-ingress-canary/ingress-canary-qlmbp" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.864630 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8da81e59-aba2-4348-a96c-c1d1e5a4d7a0-webhook-cert\") pod \"packageserver-d55dfcdfc-f8l8c\" (UID: \"8da81e59-aba2-4348-a96c-c1d1e5a4d7a0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.865023 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/efc8668a-031f-436e-838e-0e8f9675f125-signing-key\") pod \"service-ca-9c57cc56f-s86bq\" (UID: \"efc8668a-031f-436e-838e-0e8f9675f125\") " pod="openshift-service-ca/service-ca-9c57cc56f-s86bq" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.866993 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8365d2be-3472-44d1-baf3-34b41918bbf5-serving-cert\") pod \"service-ca-operator-777779d784-7vnwr\" (UID: \"8365d2be-3472-44d1-baf3-34b41918bbf5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.867143 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd-srv-cert\") pod \"olm-operator-6b444d44fb-fmj2r\" (UID: \"4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.867535 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1dd0712f-3d77-4805-ba06-d1665699b2b9-default-certificate\") pod \"router-default-5444994796-mzb8w\" (UID: \"1dd0712f-3d77-4805-ba06-d1665699b2b9\") " pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.877042 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1dd0712f-3d77-4805-ba06-d1665699b2b9-stats-auth\") pod \"router-default-5444994796-mzb8w\" (UID: \"1dd0712f-3d77-4805-ba06-d1665699b2b9\") " pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.878708 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/804d920e-23a2-4456-807c-31ebedd7ae0c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-bp24t\" (UID: \"804d920e-23a2-4456-807c-31ebedd7ae0c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-bp24t" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.870779 
4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b78d8d91-fce0-439e-80ee-0da9d10b4f73-profile-collector-cert\") pod \"catalog-operator-68c6474976-nfm22\" (UID: \"b78d8d91-fce0-439e-80ee-0da9d10b4f73\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.879406 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-fg2dj"] Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.880010 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-secret-volume\") pod \"collect-profiles-29339340-5jxft\" (UID: \"6a4e53b7-9ccc-4cf5-9359-7be4899e1311\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.880088 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1dd0712f-3d77-4805-ba06-d1665699b2b9-metrics-certs\") pod \"router-default-5444994796-mzb8w\" (UID: \"1dd0712f-3d77-4805-ba06-d1665699b2b9\") " pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.881468 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxk2h\" (UniqueName: \"kubernetes.io/projected/2312455c-16d3-42ca-9cb0-677f6fa74c41-kube-api-access-gxk2h\") pod \"machine-config-server-5qbxw\" (UID: \"2312455c-16d3-42ca-9cb0-677f6fa74c41\") " pod="openshift-machine-config-operator/machine-config-server-5qbxw" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.884291 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/576d39b9-9426-45dc-a2c7-c2d886150998-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-csrbz\" (UID: \"576d39b9-9426-45dc-a2c7-c2d886150998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.888306 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2312455c-16d3-42ca-9cb0-677f6fa74c41-node-bootstrap-token\") pod \"machine-config-server-5qbxw\" (UID: \"2312455c-16d3-42ca-9cb0-677f6fa74c41\") " pod="openshift-machine-config-operator/machine-config-server-5qbxw" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.894376 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2ws7t"] Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.899215 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd-profile-collector-cert\") pod \"olm-operator-6b444d44fb-fmj2r\" (UID: \"4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.900169 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbmjg\" (UniqueName: \"kubernetes.io/projected/f65ec13e-6d29-4c66-82a1-576e6d2e51e9-kube-api-access-xbmjg\") pod 
\"machine-config-operator-74547568cd-p44qf\" (UID: \"f65ec13e-6d29-4c66-82a1-576e6d2e51e9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.905885 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2312455c-16d3-42ca-9cb0-677f6fa74c41-certs\") pod \"machine-config-server-5qbxw\" (UID: \"2312455c-16d3-42ca-9cb0-677f6fa74c41\") " pod="openshift-machine-config-operator/machine-config-server-5qbxw" Oct 13 13:09:48 crc kubenswrapper[4684]: W1013 13:09:48.909057 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod80e4e01f_7138_4a37_adab_30201bc0289d.slice/crio-fa44d3607869690dd2e13fddda71a450d75d385af50eecfd9e05872a92db878e WatchSource:0}: Error finding container fa44d3607869690dd2e13fddda71a450d75d385af50eecfd9e05872a92db878e: Status 404 returned error can't find the container with id fa44d3607869690dd2e13fddda71a450d75d385af50eecfd9e05872a92db878e Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.921391 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9gnz\" (UniqueName: \"kubernetes.io/projected/4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd-kube-api-access-x9gnz\") pod \"olm-operator-6b444d44fb-fmj2r\" (UID: \"4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.922029 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk"] Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.924600 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:48 crc kubenswrapper[4684]: E1013 13:09:48.925008 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:49.424992756 +0000 UTC m=+143.992376826 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.932979 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67zxl\" (UniqueName: \"kubernetes.io/projected/88fc6942-d394-42da-a9a7-b0f7b9b60f5f-kube-api-access-67zxl\") pod \"ingress-canary-qlmbp\" (UID: \"88fc6942-d394-42da-a9a7-b0f7b9b60f5f\") " pod="openshift-ingress-canary/ingress-canary-qlmbp" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.938966 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-kxhdp"] Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.980961 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8xvn2"] Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.983277 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trtrl\" (UniqueName: \"kubernetes.io/projected/8da81e59-aba2-4348-a96c-c1d1e5a4d7a0-kube-api-access-trtrl\") pod \"packageserver-d55dfcdfc-f8l8c\" (UID: \"8da81e59-aba2-4348-a96c-c1d1e5a4d7a0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.985691 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbkb6\" (UniqueName: \"kubernetes.io/projected/bfb1b95a-a7f4-4856-a7b2-88a97433960a-kube-api-access-fbkb6\") pod \"ingress-operator-5b745b69d9-8mzdj\" (UID: \"bfb1b95a-a7f4-4856-a7b2-88a97433960a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" Oct 13 13:09:48 crc kubenswrapper[4684]: I1013 13:09:48.988380 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-j679w" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.007342 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2698w\" (UniqueName: \"kubernetes.io/projected/576d39b9-9426-45dc-a2c7-c2d886150998-kube-api-access-2698w\") pod \"openshift-controller-manager-operator-756b6f6bc6-csrbz\" (UID: \"576d39b9-9426-45dc-a2c7-c2d886150998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz" Oct 13 13:09:49 crc kubenswrapper[4684]: W1013 13:09:49.007785 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc809c1a6_f445_4257_aa0e_64e8e8e9484a.slice/crio-edd1a066b86e57f5aba841565f8e78124527a146fbd12cf74373f242810589ed WatchSource:0}: Error finding container edd1a066b86e57f5aba841565f8e78124527a146fbd12cf74373f242810589ed: Status 404 returned error can't find the container with id edd1a066b86e57f5aba841565f8e78124527a146fbd12cf74373f242810589ed Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.010093 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.026843 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:49 crc kubenswrapper[4684]: E1013 13:09:49.027302 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:49.52728738 +0000 UTC m=+144.094671450 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.034454 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bfb1b95a-a7f4-4856-a7b2-88a97433960a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-8mzdj\" (UID: \"bfb1b95a-a7f4-4856-a7b2-88a97433960a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.042721 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4dhh\" (UniqueName: \"kubernetes.io/projected/313cf642-fd7b-4e6a-b46a-caa3c76b340d-kube-api-access-b4dhh\") pod \"marketplace-operator-79b997595-tg4h7\" (UID: \"313cf642-fd7b-4e6a-b46a-caa3c76b340d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.043330 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.049798 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-bp24t" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.059432 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.062996 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wckxf\" (UniqueName: \"kubernetes.io/projected/1dd0712f-3d77-4805-ba06-d1665699b2b9-kube-api-access-wckxf\") pod \"router-default-5444994796-mzb8w\" (UID: \"1dd0712f-3d77-4805-ba06-d1665699b2b9\") " pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.070808 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.083459 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.092264 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fx626\" (UniqueName: \"kubernetes.io/projected/0962ce13-6826-4e10-9102-aaaa537b2f12-kube-api-access-fx626\") pod \"csi-hostpathplugin-kc5f6\" (UID: \"0962ce13-6826-4e10-9102-aaaa537b2f12\") " pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.115602 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pctl4\" (UniqueName: \"kubernetes.io/projected/efc8668a-031f-436e-838e-0e8f9675f125-kube-api-access-pctl4\") pod \"service-ca-9c57cc56f-s86bq\" (UID: \"efc8668a-031f-436e-838e-0e8f9675f125\") " pod="openshift-service-ca/service-ca-9c57cc56f-s86bq" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.119418 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-qlmbp" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.128797 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kn7r\" (UniqueName: \"kubernetes.io/projected/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-kube-api-access-9kn7r\") pod \"collect-profiles-29339340-5jxft\" (UID: \"6a4e53b7-9ccc-4cf5-9359-7be4899e1311\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.129252 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:49 crc kubenswrapper[4684]: E1013 13:09:49.129564 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:49.629549055 +0000 UTC m=+144.196933125 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.129625 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:49 crc kubenswrapper[4684]: E1013 13:09:49.130129 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:49.630119233 +0000 UTC m=+144.197503303 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.132054 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-5qbxw" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.134491 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79" event={"ID":"80e4e01f-7138-4a37-adab-30201bc0289d","Type":"ContainerStarted","Data":"fa44d3607869690dd2e13fddda71a450d75d385af50eecfd9e05872a92db878e"} Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.137726 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" event={"ID":"fff30902-ec45-47b2-a9ec-e984e1f2b240","Type":"ContainerStarted","Data":"72fb7017aad43527e61362fc3ebfef91c3b4582ae14e3ac373077cf238fe6c81"} Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.137767 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" event={"ID":"fff30902-ec45-47b2-a9ec-e984e1f2b240","Type":"ContainerStarted","Data":"dad1339ad8d87fe6d953b185442467436baacfa8d6eb36239bcf3769d724744d"} Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.143381 4684 generic.go:334] "Generic (PLEG): container finished" podID="7d71377c-822f-4d43-8d79-9d1e6ccdb340" containerID="56d35c8fc02452d5058c7a77908e8bf5326f643199bb89ae0025a615af9dbfc3" exitCode=0 Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.143523 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" event={"ID":"7d71377c-822f-4d43-8d79-9d1e6ccdb340","Type":"ContainerDied","Data":"56d35c8fc02452d5058c7a77908e8bf5326f643199bb89ae0025a615af9dbfc3"} Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.143730 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" event={"ID":"7d71377c-822f-4d43-8d79-9d1e6ccdb340","Type":"ContainerStarted","Data":"9531bdf2bd33589edc9bc9e9b8b76c9c0bdf83e16142d0ef9d03d30a063a4926"} Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.146720 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.147777 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4znr\" (UniqueName: \"kubernetes.io/projected/7f75b051-e257-44c4-9cf8-b7fadf79176c-kube-api-access-p4znr\") pod \"dns-default-lk6sg\" (UID: \"7f75b051-e257-44c4-9cf8-b7fadf79176c\") " pod="openshift-dns/dns-default-lk6sg" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.153963 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" event={"ID":"bf558883-5672-46d6-9d8a-a08070751a86","Type":"ContainerStarted","Data":"505bdcbad4443683d1ba31655d705c8e5c3819a4065ba5b3ee185e5a1aff8925"} Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.155951 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk" event={"ID":"c809c1a6-f445-4257-aa0e-64e8e8e9484a","Type":"ContainerStarted","Data":"edd1a066b86e57f5aba841565f8e78124527a146fbd12cf74373f242810589ed"} Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.167412 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwr9p\" (UniqueName: \"kubernetes.io/projected/de6c8979-1887-4e08-9439-f6654ced778f-kube-api-access-vwr9p\") pod \"control-plane-machine-set-operator-78cbb6b69f-ptz8b\" (UID: \"de6c8979-1887-4e08-9439-f6654ced778f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ptz8b" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.175997 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" event={"ID":"aa90c071-3247-46ed-a635-b234d452ae89","Type":"ContainerStarted","Data":"33d8b5f54fc4eac6e9ca0e958a2dbb1c8b5fd8e6176a9d5747f9f61196053475"} Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.176041 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" event={"ID":"aa90c071-3247-46ed-a635-b234d452ae89","Type":"ContainerStarted","Data":"9e698b308dde4faf1f7e312625eb2fa324a460c564e01a94e839da52e4309201"} Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.176610 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.184092 4684 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-c8ww6 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.184153 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" podUID="aa90c071-3247-46ed-a635-b234d452ae89" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.185141 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" 
event={"ID":"dd59bf2a-e617-424e-857c-2f7b94fbb743","Type":"ContainerStarted","Data":"e3d40e5bce8760988bcea39d78d8945f3fd8821ab07186795c06f259dc1f7f9c"} Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.185183 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" event={"ID":"dd59bf2a-e617-424e-857c-2f7b94fbb743","Type":"ContainerStarted","Data":"abd0ac76cf15c964e421c8c6d4c4a65bf51923893f308f53ae3661e0eca578c8"} Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.190497 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m27pt\" (UniqueName: \"kubernetes.io/projected/8365d2be-3472-44d1-baf3-34b41918bbf5-kube-api-access-m27pt\") pod \"service-ca-operator-777779d784-7vnwr\" (UID: \"8365d2be-3472-44d1-baf3-34b41918bbf5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.191528 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8vg72" event={"ID":"4697906b-fe4e-4a08-a82c-3a5fb0129fc9","Type":"ContainerStarted","Data":"44fcaccf9975bc34d273512dc5fff932436e5f774c79cf899cdd6ca9e709bbe4"} Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.193132 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" event={"ID":"ce48c4a9-ae90-4159-935c-911dea34cac1","Type":"ContainerStarted","Data":"6517002f145e1839790e8c6963fcd2fa6bc0d7a482cb807e3be3524fe50df9a1"} Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.194472 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6"] Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.195485 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-fg2dj" event={"ID":"9c0bec8b-2056-437f-aafe-dc4194b467df","Type":"ContainerStarted","Data":"da4f04d08b60123d2a678e956043fe64ef4e40e89d198d3a22a2ac4829fc3061"} Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.196310 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" event={"ID":"ba1678a8-b5a0-491d-9531-a18c9500d4a3","Type":"ContainerStarted","Data":"46eb566b69873e0b43fb211c5170fa67b6be1997bb4c2558bcd8ba1f1fca4011"} Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.197697 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" event={"ID":"08819dbc-2a7c-4fe9-9084-eb6ce24c2857","Type":"ContainerStarted","Data":"4947120cd015d14b2a52c601492ed2b11ec2c0fa03e70bfe83fc8f96292aa586"} Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.207347 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqmpj\" (UniqueName: \"kubernetes.io/projected/c6f0057b-5a7e-4810-a4a6-054ebd857da3-kube-api-access-qqmpj\") pod \"package-server-manager-789f6589d5-59crv\" (UID: \"c6f0057b-5a7e-4810-a4a6-054ebd857da3\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.230006 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrh7h\" (UniqueName: \"kubernetes.io/projected/b78d8d91-fce0-439e-80ee-0da9d10b4f73-kube-api-access-xrh7h\") pod \"catalog-operator-68c6474976-nfm22\" (UID: 
\"b78d8d91-fce0-439e-80ee-0da9d10b4f73\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.233161 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t"] Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.234010 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:49 crc kubenswrapper[4684]: E1013 13:09:49.234370 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:49.734333797 +0000 UTC m=+144.301717867 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.249105 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-x95d5"] Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.304832 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.316559 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.316883 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-pcpzg"] Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.335524 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:49 crc kubenswrapper[4684]: E1013 13:09:49.337404 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:49.837391136 +0000 UTC m=+144.404775206 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.361642 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.364037 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k"] Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.377596 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.388724 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.404430 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-s86bq" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.416742 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.419174 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ptz8b" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.434099 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-lk6sg" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.437466 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.441536 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-j679w"] Oct 13 13:09:49 crc kubenswrapper[4684]: E1013 13:09:49.442243 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:49.942170508 +0000 UTC m=+144.509554578 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.450015 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-l52n6"] Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.486359 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47"] Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.487853 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-rzq52"] Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.493032 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl"] Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.496631 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7"] Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.544790 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:49 crc kubenswrapper[4684]: E1013 13:09:49.545363 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:50.04534477 +0000 UTC m=+144.612728840 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.554709 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf"] Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.647118 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:49 crc kubenswrapper[4684]: E1013 13:09:49.647480 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:50.14746266 +0000 UTC m=+144.714846740 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.648089 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:49 crc kubenswrapper[4684]: E1013 13:09:49.648872 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:50.148863923 +0000 UTC m=+144.716247993 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.749437 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:49 crc kubenswrapper[4684]: E1013 13:09:49.749891 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:50.249875488 +0000 UTC m=+144.817259558 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.783349 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-l6c9m" podStartSLOduration=124.783329879 podStartE2EDuration="2m4.783329879s" podCreationTimestamp="2025-10-13 13:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:49.741165687 +0000 UTC m=+144.308549767" watchObservedRunningTime="2025-10-13 13:09:49.783329879 +0000 UTC m=+144.350713959" Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.811519 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-bp24t"] Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.832132 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r"] Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.851198 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:49 crc kubenswrapper[4684]: E1013 13:09:49.851754 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:50.351712269 +0000 UTC m=+144.919096339 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.951813 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:49 crc kubenswrapper[4684]: E1013 13:09:49.952166 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:50.452109665 +0000 UTC m=+145.019493735 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.952443 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:49 crc kubenswrapper[4684]: E1013 13:09:49.952933 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:50.45292243 +0000 UTC m=+145.020306500 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.961207 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tg4h7"] Oct 13 13:09:49 crc kubenswrapper[4684]: W1013 13:09:49.981878 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod804d920e_23a2_4456_807c_31ebedd7ae0c.slice/crio-1570ef929579bd57af0fb5edcae4d627ef303cce7528d7a2d2257e4945f34320 WatchSource:0}: Error finding container 1570ef929579bd57af0fb5edcae4d627ef303cce7528d7a2d2257e4945f34320: Status 404 returned error can't find the container with id 1570ef929579bd57af0fb5edcae4d627ef303cce7528d7a2d2257e4945f34320 Oct 13 13:09:49 crc kubenswrapper[4684]: I1013 13:09:49.985065 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj"] Oct 13 13:09:50 crc kubenswrapper[4684]: W1013 13:09:50.003393 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4dabd6c3_f9d9_4ccb_b985_c91359c7ddfd.slice/crio-1dee619d2328aef18a47361f1385818999018e86879e97206449e77da44e138a WatchSource:0}: Error finding container 1dee619d2328aef18a47361f1385818999018e86879e97206449e77da44e138a: Status 404 returned error can't find the container with id 1dee619d2328aef18a47361f1385818999018e86879e97206449e77da44e138a Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.053618 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:50 crc kubenswrapper[4684]: E1013 13:09:50.054475 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:50.554461521 +0000 UTC m=+145.121845581 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.157237 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:50 crc kubenswrapper[4684]: E1013 13:09:50.157560 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:50.657547931 +0000 UTC m=+145.224932001 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:50 crc kubenswrapper[4684]: W1013 13:09:50.187039 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod313cf642_fd7b_4e6a_b46a_caa3c76b340d.slice/crio-e02cecd09c246b87d569a3a3caeea5b0f5f9cbcdfa91120f7bca93af8fcb8989 WatchSource:0}: Error finding container e02cecd09c246b87d569a3a3caeea5b0f5f9cbcdfa91120f7bca93af8fcb8989: Status 404 returned error can't find the container with id e02cecd09c246b87d569a3a3caeea5b0f5f9cbcdfa91120f7bca93af8fcb8989 Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.197336 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-qlmbp"] Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.201322 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-kc5f6"] Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.212176 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c"] Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.258173 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:50 crc kubenswrapper[4684]: E1013 13:09:50.259035 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-13 13:09:50.75901868 +0000 UTC m=+145.326402750 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.318362 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-mzb8w" event={"ID":"1dd0712f-3d77-4805-ba06-d1665699b2b9","Type":"ContainerStarted","Data":"f4dfd09d1b220c7c3bf5382a62f496f912326aa076c41bea138eddb6c23f0304"} Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.321796 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79" event={"ID":"80e4e01f-7138-4a37-adab-30201bc0289d","Type":"ContainerStarted","Data":"914e7405b37fb68fae4611fce69c3de209571632bff8a691bee5acc27ca4bb66"} Oct 13 13:09:50 crc kubenswrapper[4684]: W1013 13:09:50.323075 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8da81e59_aba2_4348_a96c_c1d1e5a4d7a0.slice/crio-74e0b0f9da58cc74bf453fdb17cc937d1538a3bc1a8dd4a47735c410a4ca9591 WatchSource:0}: Error finding container 74e0b0f9da58cc74bf453fdb17cc937d1538a3bc1a8dd4a47735c410a4ca9591: Status 404 returned error can't find the container with id 74e0b0f9da58cc74bf453fdb17cc937d1538a3bc1a8dd4a47735c410a4ca9591 Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.324215 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t" event={"ID":"b722e16a-9b60-4d7e-84ec-17f4e70cc3bb","Type":"ContainerStarted","Data":"0c2f317d28393a6577dd6e024f55263d5c62229d5c8e13dbcc6198df07e1f34d"} Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.327804 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl" event={"ID":"a7042677-5180-4f8b-9bde-40d53c672f5a","Type":"ContainerStarted","Data":"494fd67725da59e68ffde4f893ad52e2ded11f98c8ff1bc0dac3751da7c5da9e"} Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.332720 4684 generic.go:334] "Generic (PLEG): container finished" podID="c809c1a6-f445-4257-aa0e-64e8e8e9484a" containerID="f1a117e4acd57a352ddd046bff95fecba07b4016ca3a3f38b38348cd81f30328" exitCode=0 Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.333332 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk" event={"ID":"c809c1a6-f445-4257-aa0e-64e8e8e9484a","Type":"ContainerDied","Data":"f1a117e4acd57a352ddd046bff95fecba07b4016ca3a3f38b38348cd81f30328"} Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.360080 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:50 crc kubenswrapper[4684]: E1013 13:09:50.360435 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:50.860420897 +0000 UTC m=+145.427804967 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.375093 4684 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-kxhdp container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.38:6443/healthz\": dial tcp 10.217.0.38:6443: connect: connection refused" start-of-body= Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.375138 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" podUID="ba1678a8-b5a0-491d-9531-a18c9500d4a3" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.38:6443/healthz\": dial tcp 10.217.0.38:6443: connect: connection refused" Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.392677 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-bp24t" event={"ID":"804d920e-23a2-4456-807c-31ebedd7ae0c","Type":"ContainerStarted","Data":"1570ef929579bd57af0fb5edcae4d627ef303cce7528d7a2d2257e4945f34320"} Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.392731 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" event={"ID":"ba1678a8-b5a0-491d-9531-a18c9500d4a3","Type":"ContainerStarted","Data":"7531ce962a16ecf766710ee9f292f30bc1538a14192bef97902ff588258a21b5"} Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.392760 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.393481 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" event={"ID":"f65ec13e-6d29-4c66-82a1-576e6d2e51e9","Type":"ContainerStarted","Data":"96f6a031dada3a2642390969c665ee981fe2329eed8526fb0f1646f2c2182779"} Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.411215 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-x95d5" event={"ID":"ccfbd45c-9af2-4a8d-904e-da0f0816bc86","Type":"ContainerStarted","Data":"7d6e5e803ae369d34d673dd4112b043a4e263ea68f212f8b409964568069bcaa"} Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.421545 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-qlmbp" event={"ID":"88fc6942-d394-42da-a9a7-b0f7b9b60f5f","Type":"ContainerStarted","Data":"8c86efdf90909b5a272173b0ed5ac082c8e60ea350e15245a5cdcc2896626b9a"} Oct 13 13:09:50 crc kubenswrapper[4684]: 
I1013 13:09:50.424280 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8vg72" event={"ID":"4697906b-fe4e-4a08-a82c-3a5fb0129fc9","Type":"ContainerStarted","Data":"234420267cd04430bdc52db2f3a2e7497d2ad34294f8ddc4a312ac25a68bc38f"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.439110 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" event={"ID":"bfb1b95a-a7f4-4856-a7b2-88a97433960a","Type":"ContainerStarted","Data":"c9ebbb276a29739ef23904f756a2865b4c50fdfe73ec9774024d69b0f38b9ab7"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.447514 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r" event={"ID":"4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd","Type":"ContainerStarted","Data":"1dee619d2328aef18a47361f1385818999018e86879e97206449e77da44e138a"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.450135 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k" event={"ID":"0f479582-d10a-42c3-b8cc-46740db85fd1","Type":"ContainerStarted","Data":"dba6c4d0d7da268d0fd69d7af82774cdaeee05eccf7b25a4a2017d252d18efb6"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.473439 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 13:09:50 crc kubenswrapper[4684]: E1013 13:09:50.475290 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:50.975273713 +0000 UTC m=+145.542657783 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.554435 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv"]
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.558123 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft"]
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.563197 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7" event={"ID":"fa43346f-4d23-4134-b1cb-a69926b2cc12","Type":"ContainerStarted","Data":"37786e79ecddedcf47cf193a29f49dde20d1fa99822c101bd6eab5b54ad3b714"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.575680 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj"
Oct 13 13:09:50 crc kubenswrapper[4684]: E1013 13:09:50.576286 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:51.076271358 +0000 UTC m=+145.643655418 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.592975 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ptz8b"]
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.611742 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" event={"ID":"fff30902-ec45-47b2-a9ec-e984e1f2b240","Type":"ContainerStarted","Data":"b2f19048e0691cac05266d43ee4fd261dc249a7c8998283477579390464b8c13"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.641477 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr"]
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.648988 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" event={"ID":"7d71377c-822f-4d43-8d79-9d1e6ccdb340","Type":"ContainerStarted","Data":"4bb15c19285f125e0f5e04c1352cae0e15bbdf5825378c34205fc68fc921e83d"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.663635 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-rzq52" event={"ID":"ba0003c9-5951-4b74-b146-38011315db63","Type":"ContainerStarted","Data":"803d619da0570e5cceceac7cb96c252644f27436af77cdb9b89bffb17ff97464"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.665547 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-j679w" event={"ID":"6db917b4-84de-4bd5-947f-5a2f7049f2ad","Type":"ContainerStarted","Data":"1c9fc489288753380fd218fa1f6d2d878c44c846bb5e503bf948d9b39368a337"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.671047 4684 generic.go:334] "Generic (PLEG): container finished" podID="ce48c4a9-ae90-4159-935c-911dea34cac1" containerID="ba92fae4aa3fd7424aa7d4e2b2d12c75013f439273485cb026a263fe9010d471" exitCode=0
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.671352 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" event={"ID":"ce48c4a9-ae90-4159-935c-911dea34cac1","Type":"ContainerDied","Data":"ba92fae4aa3fd7424aa7d4e2b2d12c75013f439273485cb026a263fe9010d471"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.677028 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 13:09:50 crc kubenswrapper[4684]: E1013 13:09:50.677413 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:51.177397176 +0000 UTC m=+145.744781236 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.678284 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj"
Oct 13 13:09:50 crc kubenswrapper[4684]: E1013 13:09:50.684113 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:51.184083675 +0000 UTC m=+145.751467745 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.684643 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" event={"ID":"bf558883-5672-46d6-9d8a-a08070751a86","Type":"ContainerStarted","Data":"425238116ac5846ad8d15959b4f69ec5a73652633f25e81fe0d880f519638e68"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.684679 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" event={"ID":"bf558883-5672-46d6-9d8a-a08070751a86","Type":"ContainerStarted","Data":"8de09a6f45b391e5ba66c2533177e63a3fa7727be4cb0b31ca554ee9bbab6162"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.687955 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz"]
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.692175 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22"]
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.705582 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-lk6sg"]
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.706192 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-5qbxw" event={"ID":"2312455c-16d3-42ca-9cb0-677f6fa74c41","Type":"ContainerStarted","Data":"afe7f7df903313e67f1d9d28940920cf3f09e086fdc6dd289e502f6febafc062"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.705629 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" podStartSLOduration=124.705606395 podStartE2EDuration="2m4.705606395s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:50.675383974 +0000 UTC m=+145.242768044" watchObservedRunningTime="2025-10-13 13:09:50.705606395 +0000 UTC m=+145.272990465"
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.707389 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47" event={"ID":"5c139ab7-d464-4e3c-877e-5f41ae042c0e","Type":"ContainerStarted","Data":"663b99cdf4d430d3181b5ba4fbb36aa906f621e04605a0596f593bbd7f785ae1"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.711369 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" event={"ID":"313cf642-fd7b-4e6a-b46a-caa3c76b340d","Type":"ContainerStarted","Data":"e02cecd09c246b87d569a3a3caeea5b0f5f9cbcdfa91120f7bca93af8fcb8989"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.712751 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" event={"ID":"cd8628a7-0787-48d4-a4fe-43323245b766","Type":"ContainerStarted","Data":"496a6517f364d301ea217d4b9671a00470d8ee0e5f1152625ba68901cec560f5"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.719075 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-s86bq"]
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.719117 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-fg2dj"
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.719131 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-fg2dj" event={"ID":"9c0bec8b-2056-437f-aafe-dc4194b467df","Type":"ContainerStarted","Data":"9305b4bcf21dbab9a00b68d949e0a412cc27749ef7917d0c035de602adeacaa6"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.720970 4684 patch_prober.go:28] interesting pod/console-operator-58897d9998-fg2dj container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body=
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.721039 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-fg2dj" podUID="9c0bec8b-2056-437f-aafe-dc4194b467df" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused"
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.723749 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" event={"ID":"0962ce13-6826-4e10-9102-aaaa537b2f12","Type":"ContainerStarted","Data":"9bdd4883b26abb613418513434870d860b1151efaab65125c1c1c98e93265a40"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.726432 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l52n6" event={"ID":"6f6919bc-7475-4569-a9da-e72d185ed9a8","Type":"ContainerStarted","Data":"00cc7f30a9744a3f159d12b3edf61ae0e41a9718072fc9772177a4bedc5759e7"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.726464 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l52n6" event={"ID":"6f6919bc-7475-4569-a9da-e72d185ed9a8","Type":"ContainerStarted","Data":"79722b78ce5713fb09509282febc396e6c0ef2180a2b3cfb12b930d6dea54a06"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.757123 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" event={"ID":"08819dbc-2a7c-4fe9-9084-eb6ce24c2857","Type":"ContainerStarted","Data":"31ae535f1faf9d7215ab35027ab168bd5cb1163110968193e0701b02b8506a5a"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.757421 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t"
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.758797 4684 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-2ws7t container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body=
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.758842 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" podUID="08819dbc-2a7c-4fe9-9084-eb6ce24c2857" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused"
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.759121 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" event={"ID":"d7687923-06cc-47ec-98c9-5c7c9862d6a2","Type":"ContainerStarted","Data":"2905cdd080fe2ba6f02b4e518b8e2cb10cb270f8b0bc32557f5f5fd458032776"}
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.759151 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" event={"ID":"d7687923-06cc-47ec-98c9-5c7c9862d6a2","Type":"ContainerStarted","Data":"f82de49f27a65febd4010ff5f5fb84ad5fa1d9f5f7edcf0d4373f414b1abe531"}
Oct 13 13:09:50 crc kubenswrapper[4684]: W1013 13:09:50.762577 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6a4e53b7_9ccc_4cf5_9359_7be4899e1311.slice/crio-422948fd20a5b237cdd7048a1902144d71c584ae19616ea93f5eb375f57e2e4f WatchSource:0}: Error finding container 422948fd20a5b237cdd7048a1902144d71c584ae19616ea93f5eb375f57e2e4f: Status 404 returned error can't find the container with id 422948fd20a5b237cdd7048a1902144d71c584ae19616ea93f5eb375f57e2e4f
Oct 13 13:09:50 crc kubenswrapper[4684]: W1013 13:09:50.774534 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8365d2be_3472_44d1_baf3_34b41918bbf5.slice/crio-38ce41bf0e7cbf772e8bf0cf0b241bb04340c7d53308a23afe9dde7d58377892 WatchSource:0}: Error finding container 38ce41bf0e7cbf772e8bf0cf0b241bb04340c7d53308a23afe9dde7d58377892: Status 404 returned error can't find the container with id 38ce41bf0e7cbf772e8bf0cf0b241bb04340c7d53308a23afe9dde7d58377892
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.779312 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 13:09:50 crc kubenswrapper[4684]: E1013 13:09:50.780290 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:51.280269099 +0000 UTC m=+145.847653169 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.885751 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj"
Oct 13 13:09:50 crc kubenswrapper[4684]: E1013 13:09:50.887147 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:51.387132736 +0000 UTC m=+145.954516806 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.933588 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fzt79" podStartSLOduration=125.933561851 podStartE2EDuration="2m5.933561851s" podCreationTimestamp="2025-10-13 13:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:50.918749821 +0000 UTC m=+145.486133901" watchObservedRunningTime="2025-10-13 13:09:50.933561851 +0000 UTC m=+145.500945921"
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.950861 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8g6fg" podStartSLOduration=125.95084526 podStartE2EDuration="2m5.95084526s" podCreationTimestamp="2025-10-13 13:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:50.947457264 +0000 UTC m=+145.514841334" watchObservedRunningTime="2025-10-13 13:09:50.95084526 +0000 UTC m=+145.518229330"
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.990408 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 13:09:50 crc kubenswrapper[4684]: E1013 13:09:50.990554 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:51.490529425 +0000 UTC m=+146.057913495 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:50 crc kubenswrapper[4684]: I1013 13:09:50.999401 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj"
Oct 13 13:09:51 crc kubenswrapper[4684]: E1013 13:09:50.999968 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:51.499953479 +0000 UTC m=+146.067337549 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.003647 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-8vg72" podStartSLOduration=125.002890261 podStartE2EDuration="2m5.002890261s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:51.000129955 +0000 UTC m=+145.567514035" watchObservedRunningTime="2025-10-13 13:09:51.002890261 +0000 UTC m=+145.570274331"
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.030494 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-87shb" podStartSLOduration=125.030469159 podStartE2EDuration="2m5.030469159s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:51.029890191 +0000 UTC m=+145.597274271" watchObservedRunningTime="2025-10-13 13:09:51.030469159 +0000 UTC m=+145.597853229"
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.091596 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-fg2dj" podStartSLOduration=125.091569191 podStartE2EDuration="2m5.091569191s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:51.09025152 +0000 UTC m=+145.657635580" watchObservedRunningTime="2025-10-13 13:09:51.091569191 +0000 UTC m=+145.658953261"
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.101480 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 13:09:51 crc kubenswrapper[4684]: E1013 13:09:51.101926 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:51.601889932 +0000 UTC m=+146.169274002 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.120645 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6"
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.143765 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" podStartSLOduration=125.143738835 podStartE2EDuration="2m5.143738835s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:51.122083481 +0000 UTC m=+145.689467551" watchObservedRunningTime="2025-10-13 13:09:51.143738835 +0000 UTC m=+145.711122905"
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.204071 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj"
Oct 13 13:09:51 crc kubenswrapper[4684]: E1013 13:09:51.204565 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:51.704548779 +0000 UTC m=+146.271932849 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.257229 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" podStartSLOduration=125.257209319 podStartE2EDuration="2m5.257209319s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:51.249722195 +0000 UTC m=+145.817106265" watchObservedRunningTime="2025-10-13 13:09:51.257209319 +0000 UTC m=+145.824593379"
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.305302 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 13:09:51 crc kubenswrapper[4684]: E1013 13:09:51.305705 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:51.805683537 +0000 UTC m=+146.373067597 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.323854 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rdpl6" podStartSLOduration=125.323832453 podStartE2EDuration="2m5.323832453s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:51.323018087 +0000 UTC m=+145.890402157" watchObservedRunningTime="2025-10-13 13:09:51.323832453 +0000 UTC m=+145.891216523"
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.351704 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" podStartSLOduration=126.35168463 podStartE2EDuration="2m6.35168463s" podCreationTimestamp="2025-10-13 13:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:51.348948095 +0000 UTC m=+145.916332165" watchObservedRunningTime="2025-10-13 13:09:51.35168463 +0000 UTC m=+145.919068700"
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.400646 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-5qbxw" podStartSLOduration=5.400623884 podStartE2EDuration="5.400623884s" podCreationTimestamp="2025-10-13 13:09:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:51.370396832 +0000 UTC m=+145.937780902" watchObservedRunningTime="2025-10-13 13:09:51.400623884 +0000 UTC m=+145.968007954"
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.406885 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj"
Oct 13 13:09:51 crc kubenswrapper[4684]: E1013 13:09:51.407195 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:51.907183188 +0000 UTC m=+146.474567248 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.508510 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 13:09:51 crc kubenswrapper[4684]: E1013 13:09:51.509271 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:52.009254756 +0000 UTC m=+146.576638826 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.610578 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj"
Oct 13 13:09:51 crc kubenswrapper[4684]: E1013 13:09:51.611064 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:52.111045195 +0000 UTC m=+146.678429265 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.711584 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 13:09:51 crc kubenswrapper[4684]: E1013 13:09:51.712096 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:52.21206802 +0000 UTC m=+146.779452100 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.812833 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj"
Oct 13 13:09:51 crc kubenswrapper[4684]: E1013 13:09:51.823379 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:52.323363566 +0000 UTC m=+146.890747636 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.833955 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ptz8b" event={"ID":"de6c8979-1887-4e08-9439-f6654ced778f","Type":"ContainerStarted","Data":"9b5805c3e499a06bdb5a25ecc147ff9837ba10547930384833c2877d5ff825b0"}
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.833997 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ptz8b" event={"ID":"de6c8979-1887-4e08-9439-f6654ced778f","Type":"ContainerStarted","Data":"99dfeba0f7f8f508edc2d772cc02d85445ec98e64e406df6552f0164174aec98"}
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.861997 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ptz8b" podStartSLOduration=125.861979668 podStartE2EDuration="2m5.861979668s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:51.861791382 +0000 UTC m=+146.429175452" watchObservedRunningTime="2025-10-13 13:09:51.861979668 +0000 UTC m=+146.429363738"
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.863494 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk" event={"ID":"c809c1a6-f445-4257-aa0e-64e8e8e9484a","Type":"ContainerStarted","Data":"066d886f27d6198d49eb7aeb10be282dc388fa4aec749eed1e26e69fdd3b441c"}
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.863552 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk"
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.907873 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk" podStartSLOduration=125.907853556 podStartE2EDuration="2m5.907853556s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:51.906260657 +0000 UTC m=+146.473644727" watchObservedRunningTime="2025-10-13 13:09:51.907853556 +0000 UTC m=+146.475237626"
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.911209 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-j679w" event={"ID":"6db917b4-84de-4bd5-947f-5a2f7049f2ad","Type":"ContainerStarted","Data":"ca5c58964a90ec315a54cf6333a571f0869c7c3dcf5ef68826f9996bd02662f5"}
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.915431 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 13:09:51 crc kubenswrapper[4684]: E1013 13:09:51.916662 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:52.416628969 +0000 UTC m=+146.984013039 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.931998 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-mzb8w" event={"ID":"1dd0712f-3d77-4805-ba06-d1665699b2b9","Type":"ContainerStarted","Data":"4714e614f655a7a1b19d416d914bb26a155c3d09420f6c82aab728fa4b3279cf"}
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.953448 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" event={"ID":"8da81e59-aba2-4348-a96c-c1d1e5a4d7a0","Type":"ContainerStarted","Data":"d8db0a42da850025ea1448cfc71fd5424daac6550f74057329f021b1bca8fcc6"}
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.953492 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" event={"ID":"8da81e59-aba2-4348-a96c-c1d1e5a4d7a0","Type":"ContainerStarted","Data":"74e0b0f9da58cc74bf453fdb17cc937d1538a3bc1a8dd4a47735c410a4ca9591"}
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.954621 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c"
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.995313 4684 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-f8l8c container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:5443/healthz\": dial tcp 10.217.0.22:5443: connect: connection refused" start-of-body=
Oct 13 13:09:51 crc kubenswrapper[4684]: I1013 13:09:51.995862 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" podUID="8da81e59-aba2-4348-a96c-c1d1e5a4d7a0" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.22:5443/healthz\": dial tcp 10.217.0.22:5443: connect: connection refused"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.003502 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" event={"ID":"313cf642-fd7b-4e6a-b46a-caa3c76b340d","Type":"ContainerStarted","Data":"1eb89c1877da49206273d347d6d0f5fcb00e76e353e04210741cd4d98e3641dc"}
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.009102 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.010099 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r" event={"ID":"4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd","Type":"ContainerStarted","Data":"2c709e7f1465e93aae754c8b9fa69a42ce97314d0561f8fc8514a7ddd276d41e"}
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.010761 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.012693 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-mzb8w" podStartSLOduration=126.0126718 podStartE2EDuration="2m6.0126718s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:51.956043796 +0000 UTC m=+146.523427886" watchObservedRunningTime="2025-10-13 13:09:52.0126718 +0000 UTC m=+146.580055870"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.013410 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" podStartSLOduration=126.013404333 podStartE2EDuration="2m6.013404333s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:51.996704193 +0000 UTC m=+146.564088283" watchObservedRunningTime="2025-10-13 13:09:52.013404333 +0000 UTC m=+146.580788403"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.017672 4684 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-fmj2r container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused" start-of-body=
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.017720 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r" podUID="4dabd6c3-f9d9-4ccb-b985-c91359c7ddfd" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.017807 4684 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-tg4h7 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body=
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.017833 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" podUID="313cf642-fd7b-4e6a-b46a-caa3c76b340d" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.029001 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj"
Oct 13 13:09:52 crc kubenswrapper[4684]: E1013 13:09:52.029454 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:52.529435652 +0000 UTC m=+147.096819712 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.046671 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" podStartSLOduration=126.046644377 podStartE2EDuration="2m6.046644377s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.044211222 +0000 UTC m=+146.611595292" watchObservedRunningTime="2025-10-13 13:09:52.046644377 +0000 UTC m=+146.614028447"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.050331 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7" event={"ID":"fa43346f-4d23-4134-b1cb-a69926b2cc12","Type":"ContainerStarted","Data":"2ab8c3ea5128d6ecdf4d6cd50d0e2dd38af7eaaa3a9abdf9f3037f938b3e2c22"}
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.055328 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl" event={"ID":"a7042677-5180-4f8b-9bde-40d53c672f5a","Type":"ContainerStarted","Data":"e21611ebb18343e9adc115b45e1e65829d8bba4c43dad708a27cec7e7ea08868"}
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.084433 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv" event={"ID":"c6f0057b-5a7e-4810-a4a6-054ebd857da3","Type":"ContainerStarted","Data":"297468203c49e3889821f09628b4703b2ec95a6a46be03c9d788f14dedf1329d"}
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.084488 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv" event={"ID":"c6f0057b-5a7e-4810-a4a6-054ebd857da3","Type":"ContainerStarted","Data":"a29b903a92e7d2d5b0345d21a1b02fcfdc19fb6aabda8b9bf9f3cf47a62dfa8c"}
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.093669 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.113432 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22" event={"ID":"b78d8d91-fce0-439e-80ee-0da9d10b4f73","Type":"ContainerStarted","Data":"169e90eef1a25441eca9cfe8a064cac92b50c3afdda4c5644fa339fffc34bb05"}
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.114415 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.122416 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r" podStartSLOduration=126.122399406 podStartE2EDuration="2m6.122399406s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.079881612 +0000 UTC m=+146.647265692" watchObservedRunningTime="2025-10-13 13:09:52.122399406 +0000 UTC m=+146.689783476"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.131612 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 13:09:52 crc kubenswrapper[4684]: E1013 13:09:52.131694 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:52.631672774 +0000 UTC m=+147.199056844 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.140041 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.131799 4684 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-nfm22 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body=
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.140531 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22" podUID="b78d8d91-fce0-439e-80ee-0da9d10b4f73" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused"
Oct 13 13:09:52 crc kubenswrapper[4684]: E1013 13:09:52.143823 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:52.643808472 +0000 UTC m=+147.211192542 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.159684 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-5qbxw" event={"ID":"2312455c-16d3-42ca-9cb0-677f6fa74c41","Type":"ContainerStarted","Data":"b94254a33deb0b42f42ae7082eb4c6ea2beaac5c2849b818773d72559ad9d080"}
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.184764 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4bdxl" podStartSLOduration=126.184739466 podStartE2EDuration="2m6.184739466s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.141143059 +0000 UTC m=+146.708527129" watchObservedRunningTime="2025-10-13 13:09:52.184739466 +0000 UTC m=+146.752123536"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.186035 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7" podStartSLOduration=126.186029377 podStartE2EDuration="2m6.186029377s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.181534137 +0000 UTC m=+146.748918207" watchObservedRunningTime="2025-10-13 13:09:52.186029377 +0000 UTC m=+146.753413447"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.201474 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l52n6" event={"ID":"6f6919bc-7475-4569-a9da-e72d185ed9a8","Type":"ContainerStarted","Data":"22a7215b1089c9f6bfb3c08263fb8be6f5d9b3671abb83d8c3c641bb756a4b16"}
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.215340 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv" podStartSLOduration=126.215306039 podStartE2EDuration="2m6.215306039s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.206012739 +0000 UTC m=+146.773396819" watchObservedRunningTime="2025-10-13 13:09:52.215306039 +0000 UTC m=+146.782690129"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.245430 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 13:09:52 crc kubenswrapper[4684]: E1013 13:09:52.247360 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:52.747328925 +0000 UTC m=+147.314712995 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.247670 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj"
Oct 13 13:09:52 crc kubenswrapper[4684]: E1013 13:09:52.248765 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:52.74874289 +0000 UTC m=+147.316127130 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.253004 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t" event={"ID":"b722e16a-9b60-4d7e-84ec-17f4e70cc3bb","Type":"ContainerStarted","Data":"9537448c37c8e2626dbde9a796891dbc21be0b725f5f76d2c19b914517935a79"}
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.275290 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr" event={"ID":"8365d2be-3472-44d1-baf3-34b41918bbf5","Type":"ContainerStarted","Data":"f5f50ea49f6c81122b190bb53b0cb850ee207eae77c8e5c6fc27f23078a35ceb"}
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.275380 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr" event={"ID":"8365d2be-3472-44d1-baf3-34b41918bbf5","Type":"ContainerStarted","Data":"38ce41bf0e7cbf772e8bf0cf0b241bb04340c7d53308a23afe9dde7d58377892"}
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.275667 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22" podStartSLOduration=126.275646377 podStartE2EDuration="2m6.275646377s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.272455038 +0000 UTC m=+146.839839108" watchObservedRunningTime="2025-10-13 13:09:52.275646377 +0000 UTC m=+146.843030467"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.323879 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz" event={"ID":"576d39b9-9426-45dc-a2c7-c2d886150998","Type":"ContainerStarted","Data":"25bcf5653cf43c5668061561bcdda5dc0dafac0684648613bfce5a8f3c4bbed1"}
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.323979 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz" event={"ID":"576d39b9-9426-45dc-a2c7-c2d886150998","Type":"ContainerStarted","Data":"ecc7e474c3d489171d9659d14bf8ab585cecc54f2df6bf68ceffceec035d11d0"}
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.329766 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4lk4t" podStartSLOduration=126.329736092 podStartE2EDuration="2m6.329736092s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.32198648 +0000 UTC m=+146.889370570" watchObservedRunningTime="2025-10-13 13:09:52.329736092 +0000 UTC m=+146.897120162"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.331640 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-l52n6" podStartSLOduration=126.33163221 podStartE2EDuration="2m6.33163221s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.302204395 +0000 UTC m=+146.869588465" watchObservedRunningTime="2025-10-13 13:09:52.33163221 +0000 UTC m=+146.899016280"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.350479 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 13:09:52 crc kubenswrapper[4684]: E1013 13:09:52.352245 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:52.852218512 +0000 UTC m=+147.419602572 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.354790 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7vnwr" podStartSLOduration=126.354774391 podStartE2EDuration="2m6.354774391s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.354338547 +0000 UTC m=+146.921722617" watchObservedRunningTime="2025-10-13 13:09:52.354774391 +0000 UTC m=+146.922158461"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.383444 4684 patch_prober.go:28] interesting pod/router-default-5444994796-mzb8w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 13 13:09:52 crc kubenswrapper[4684]: [-]has-synced failed: reason withheld
Oct 13 13:09:52 crc kubenswrapper[4684]: [+]process-running ok
Oct 13 13:09:52 crc kubenswrapper[4684]: healthz check failed
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.383503 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mzb8w" podUID="1dd0712f-3d77-4805-ba06-d1665699b2b9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.401031 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-csrbz" podStartSLOduration=126.4010014 podStartE2EDuration="2m6.4010014s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.400069231 +0000 UTC m=+146.967453301" watchObservedRunningTime="2025-10-13 13:09:52.4010014 +0000 UTC m=+146.968385470"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.431593 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg" podStartSLOduration=126.431574732 podStartE2EDuration="2m6.431574732s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.431463818 +0000 UTC m=+146.998847888" watchObservedRunningTime="2025-10-13 13:09:52.431574732 +0000 UTC m=+146.998958802"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.457086 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-mzb8w"
Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.457171 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-pcpzg"
event={"ID":"cd8628a7-0787-48d4-a4fe-43323245b766","Type":"ContainerStarted","Data":"08b40b3b8057125dbc9fe24f103b67a16a8d2df6d9af381b508991a18687ff20"} Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.457202 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-qlmbp" event={"ID":"88fc6942-d394-42da-a9a7-b0f7b9b60f5f","Type":"ContainerStarted","Data":"73dd357bfcdb1d07595b4b8b05893ef4735bc8804e98f6a61d07d7c2d0b3ca2b"} Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.457230 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47" event={"ID":"5c139ab7-d464-4e3c-877e-5f41ae042c0e","Type":"ContainerStarted","Data":"eb9cbcdd9100d907209a97db5f4e1b1bac6959fb97e3cebae43a9d513267cb99"} Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.460327 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.460794 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-qlmbp" podStartSLOduration=7.460764801 podStartE2EDuration="7.460764801s" podCreationTimestamp="2025-10-13 13:09:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.459639976 +0000 UTC m=+147.027024046" watchObservedRunningTime="2025-10-13 13:09:52.460764801 +0000 UTC m=+147.028148881" Oct 13 13:09:52 crc kubenswrapper[4684]: E1013 13:09:52.463428 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:52.963412473 +0000 UTC m=+147.530796543 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.479642 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-lk6sg" event={"ID":"7f75b051-e257-44c4-9cf8-b7fadf79176c","Type":"ContainerStarted","Data":"25c0edd694ee699de50f64b0abf90c2cccee7931cd317115c7b995d90c830a6c"} Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.488956 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" event={"ID":"bfb1b95a-a7f4-4856-a7b2-88a97433960a","Type":"ContainerStarted","Data":"9365fce7a0c80e415d0603ed54a00abd388f79a506876876c7d0efd2fa23e635"} Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.513893 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" event={"ID":"f65ec13e-6d29-4c66-82a1-576e6d2e51e9","Type":"ContainerStarted","Data":"a4a723153802c2b5541f1fc7b8e6481c3eca5f680b567d369e4d85e6223c82df"} Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.514045 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" event={"ID":"f65ec13e-6d29-4c66-82a1-576e6d2e51e9","Type":"ContainerStarted","Data":"8ca9b8f18209f600bf185abc7da3471195bdca8ec98bd248bf5e81fd423884db"} Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.548116 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" podStartSLOduration=126.54808495 podStartE2EDuration="2m6.54808495s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.546342955 +0000 UTC m=+147.113727025" watchObservedRunningTime="2025-10-13 13:09:52.54808495 +0000 UTC m=+147.115469020" Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.548324 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8fv47" podStartSLOduration=126.548318587 podStartE2EDuration="2m6.548318587s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.507669251 +0000 UTC m=+147.075053311" watchObservedRunningTime="2025-10-13 13:09:52.548318587 +0000 UTC m=+147.115702657" Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.567581 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" event={"ID":"6a4e53b7-9ccc-4cf5-9359-7be4899e1311","Type":"ContainerStarted","Data":"50f34ee3e95f5ea7544d0cd5c20d73c6b8bedcf8da98054dd3d141b38c1cda0e"} Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.567666 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" event={"ID":"6a4e53b7-9ccc-4cf5-9359-7be4899e1311","Type":"ContainerStarted","Data":"422948fd20a5b237cdd7048a1902144d71c584ae19616ea93f5eb375f57e2e4f"} Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.568135 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:52 crc kubenswrapper[4684]: E1013 13:09:52.568480 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.068456113 +0000 UTC m=+147.635840183 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.568755 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:52 crc kubenswrapper[4684]: E1013 13:09:52.569066 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.069057732 +0000 UTC m=+147.636441802 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.596927 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-x95d5" event={"ID":"ccfbd45c-9af2-4a8d-904e-da0f0816bc86","Type":"ContainerStarted","Data":"ec40c427581acf621de5783ee4934590d2438694e8e626de23854416055c5d80"} Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.607540 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-s86bq" event={"ID":"efc8668a-031f-436e-838e-0e8f9675f125","Type":"ContainerStarted","Data":"294b6a70a44e8ec2480345b9a6009a98699aa5e09280a91076c99d9243af4756"} Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.623345 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-rzq52" event={"ID":"ba0003c9-5951-4b74-b146-38011315db63","Type":"ContainerStarted","Data":"43fdac35ffe32b42484496ffa520a35b6fd8fb187c6b352f73628883124451aa"} Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.624226 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-rzq52" Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.638825 4684 patch_prober.go:28] interesting pod/downloads-7954f5f757-rzq52 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.638894 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rzq52" podUID="ba0003c9-5951-4b74-b146-38011315db63" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.655472 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p44qf" podStartSLOduration=126.655443162 podStartE2EDuration="2m6.655443162s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.63963349 +0000 UTC m=+147.207017590" watchObservedRunningTime="2025-10-13 13:09:52.655443162 +0000 UTC m=+147.222827232" Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.655955 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k" event={"ID":"0f479582-d10a-42c3-b8cc-46740db85fd1","Type":"ContainerStarted","Data":"d200bf79a9b8df589c38746a92340713edaba242a10b8f8e9c435a9d2bd6c467"} Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.670758 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:52 crc kubenswrapper[4684]: E1013 13:09:52.671783 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.171761151 +0000 UTC m=+147.739145221 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.777293 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.786367 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" Oct 13 13:09:52 crc kubenswrapper[4684]: E1013 13:09:52.787390 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.287362819 +0000 UTC m=+147.854746889 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.801284 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.878472 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:52 crc kubenswrapper[4684]: E1013 13:09:52.879657 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.379630332 +0000 UTC m=+147.947014402 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.932239 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" podStartSLOduration=126.932219569 podStartE2EDuration="2m6.932219569s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.828286364 +0000 UTC m=+147.395670434" watchObservedRunningTime="2025-10-13 13:09:52.932219569 +0000 UTC m=+147.499603639" Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.981133 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:52 crc kubenswrapper[4684]: E1013 13:09:52.981575 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.481555406 +0000 UTC m=+148.048939476 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:52 crc kubenswrapper[4684]: I1013 13:09:52.993960 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-s86bq" podStartSLOduration=126.993940161 podStartE2EDuration="2m6.993940161s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.932146848 +0000 UTC m=+147.499530918" watchObservedRunningTime="2025-10-13 13:09:52.993940161 +0000 UTC m=+147.561324251" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.050064 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-rzq52" podStartSLOduration=127.050045077 podStartE2EDuration="2m7.050045077s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:52.998692009 +0000 UTC m=+147.566076079" watchObservedRunningTime="2025-10-13 13:09:53.050045077 +0000 UTC m=+147.617429157" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.082281 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:53 crc kubenswrapper[4684]: E1013 13:09:53.082538 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.582512569 +0000 UTC m=+148.149896639 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.082999 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:53 crc kubenswrapper[4684]: E1013 13:09:53.083391 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.583376546 +0000 UTC m=+148.150760616 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.110801 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-x95d5" podStartSLOduration=128.110783759 podStartE2EDuration="2m8.110783759s" podCreationTimestamp="2025-10-13 13:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:53.07067718 +0000 UTC m=+147.638061250" watchObservedRunningTime="2025-10-13 13:09:53.110783759 +0000 UTC m=+147.678167819" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.134024 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.134075 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.151667 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.184241 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:53 crc kubenswrapper[4684]: E1013 13:09:53.184420 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.68439211 +0000 UTC m=+148.251776180 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.184470 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:53 crc kubenswrapper[4684]: E1013 13:09:53.184770 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.684758162 +0000 UTC m=+148.252142232 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.190961 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xk62k" podStartSLOduration=127.190937005 podStartE2EDuration="2m7.190937005s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:53.188482188 +0000 UTC m=+147.755866258" watchObservedRunningTime="2025-10-13 13:09:53.190937005 +0000 UTC m=+147.758321085" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.285629 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:53 crc kubenswrapper[4684]: E1013 13:09:53.285887 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.78585756 +0000 UTC m=+148.353241630 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.285963 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:53 crc kubenswrapper[4684]: E1013 13:09:53.286360 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.786345545 +0000 UTC m=+148.353729625 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.375589 4684 patch_prober.go:28] interesting pod/router-default-5444994796-mzb8w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 13 13:09:53 crc kubenswrapper[4684]: [-]has-synced failed: reason withheld Oct 13 13:09:53 crc kubenswrapper[4684]: [+]process-running ok Oct 13 13:09:53 crc kubenswrapper[4684]: healthz check failed Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.375675 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mzb8w" podUID="1dd0712f-3d77-4805-ba06-d1665699b2b9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.387493 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:53 crc kubenswrapper[4684]: E1013 13:09:53.387885 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.887868416 +0000 UTC m=+148.455252486 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.489611 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:53 crc kubenswrapper[4684]: E1013 13:09:53.490040 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:53.990024226 +0000 UTC m=+148.557408296 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.590628 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:53 crc kubenswrapper[4684]: E1013 13:09:53.590864 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:54.090820925 +0000 UTC m=+148.658204995 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.591219 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:53 crc kubenswrapper[4684]: E1013 13:09:53.591672 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:54.091657361 +0000 UTC m=+148.659041431 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.612027 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-fg2dj" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.691926 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:53 crc kubenswrapper[4684]: E1013 13:09:53.692249 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:54.192234172 +0000 UTC m=+148.759618242 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.764793 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-lk6sg" event={"ID":"7f75b051-e257-44c4-9cf8-b7fadf79176c","Type":"ContainerStarted","Data":"0d243f6752d3f9445e04fd85fcf0143bf3aec9a374cc6ebb06ed181a0f4dd31d"} Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.764834 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-lk6sg" event={"ID":"7f75b051-e257-44c4-9cf8-b7fadf79176c","Type":"ContainerStarted","Data":"6ba3cae75e17f12b5d63ab8e2ce36bd80a4dc84e2b796386ced43d22baaa67e7"} Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.765491 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-lk6sg" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.767357 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-s86bq" event={"ID":"efc8668a-031f-436e-838e-0e8f9675f125","Type":"ContainerStarted","Data":"49a40c1b98d1cf88378413ccbbb0e248fc605fca32da5823aad6f26af8f944e2"} Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.772503 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22" event={"ID":"b78d8d91-fce0-439e-80ee-0da9d10b4f73","Type":"ContainerStarted","Data":"f3d6d6eb583c9e9e994797e890b93801e587c7d97e25330b5bee4e0c137888b6"} Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.777511 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-x95d5" event={"ID":"ccfbd45c-9af2-4a8d-904e-da0f0816bc86","Type":"ContainerStarted","Data":"bfcd6807335912ef0b9d97c7e15d41bc9cfad61c307e8980e8b05d5287786af9"} Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.781787 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-bp24t" event={"ID":"804d920e-23a2-4456-807c-31ebedd7ae0c","Type":"ContainerStarted","Data":"3107367f0c1ac85ea36c1b615874d2aa65405168f5d2a63b52fb82b38f9f3c5f"} Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.781835 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-bp24t" event={"ID":"804d920e-23a2-4456-807c-31ebedd7ae0c","Type":"ContainerStarted","Data":"dafc68ee7b91f347b9077126db8f0f4690c601684f35bf24adfab277f6190c57"} Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.786984 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9m7v7" event={"ID":"fa43346f-4d23-4134-b1cb-a69926b2cc12","Type":"ContainerStarted","Data":"01d7edea5fd49510e353c3b68b18e9625e39d58e1bf5dc037f2ad24a14514cf3"} Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.791733 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nfm22" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 
13:09:53.792807 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.792918 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:53 crc kubenswrapper[4684]: E1013 13:09:53.793242 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:54.293230347 +0000 UTC m=+148.860614417 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.794071 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.794155 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" event={"ID":"0962ce13-6826-4e10-9102-aaaa537b2f12","Type":"ContainerStarted","Data":"eccb0942b7e0cb43c2d69da9f4b09ba2439817807f4eceead5c3932624671b74"} Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.796345 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8mzdj" event={"ID":"bfb1b95a-a7f4-4856-a7b2-88a97433960a","Type":"ContainerStarted","Data":"b73d0865e6fc70729fc6a8e6103eeeb6422fa89e0e7d15fb1d5a1b7b78311be9"} Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.798849 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-j679w" event={"ID":"6db917b4-84de-4bd5-947f-5a2f7049f2ad","Type":"ContainerStarted","Data":"b77079d736e3d5bd23940cc08f0c01631bf53b7405d4cae4039dfd69fb13c34b"} Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.802065 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" event={"ID":"ce48c4a9-ae90-4159-935c-911dea34cac1","Type":"ContainerStarted","Data":"8a4f75957422b6da238a3728cf11ef1ea4a34f3059db7e31b97692608a67f525"} Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.802091 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" 
event={"ID":"ce48c4a9-ae90-4159-935c-911dea34cac1","Type":"ContainerStarted","Data":"4295b81b1a31d4c498a794d2064e56ae0e77ca6afcfe1d2fe0db1ac92e5959de"} Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.805403 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv" event={"ID":"c6f0057b-5a7e-4810-a4a6-054ebd857da3","Type":"ContainerStarted","Data":"71681c384f0a708d4dc45b51b68c9bf061e9a6e99fe4933e9f6031b91ded4d74"} Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.807472 4684 patch_prober.go:28] interesting pod/downloads-7954f5f757-rzq52 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.807492 4684 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-tg4h7 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body= Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.807519 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rzq52" podUID="ba0003c9-5951-4b74-b146-38011315db63" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.807535 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" podUID="313cf642-fd7b-4e6a-b46a-caa3c76b340d" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.813603 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-fmj2r" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.819242 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvrs" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.881824 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-lk6sg" podStartSLOduration=8.881806354 podStartE2EDuration="8.881806354s" podCreationTimestamp="2025-10-13 13:09:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:53.797503599 +0000 UTC m=+148.364887659" watchObservedRunningTime="2025-10-13 13:09:53.881806354 +0000 UTC m=+148.449190424" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.893853 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.894880 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: 
\"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.895372 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.895485 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.968974 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.969510 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:09:53 crc kubenswrapper[4684]: E1013 13:09:53.985159 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:54.485128471 +0000 UTC m=+149.052512541 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:53 crc kubenswrapper[4684]: I1013 13:09:53.997087 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:53 crc kubenswrapper[4684]: E1013 13:09:53.997476 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-13 13:09:54.497457595 +0000 UTC m=+149.064841665 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.012710 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.057790 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-bp24t" podStartSLOduration=128.057777053 podStartE2EDuration="2m8.057777053s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:53.938572672 +0000 UTC m=+148.505956752" watchObservedRunningTime="2025-10-13 13:09:54.057777053 +0000 UTC m=+148.625161123" Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.098338 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:54 crc kubenswrapper[4684]: E1013 13:09:54.098733 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:54.598717098 +0000 UTC m=+149.166101168 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.152822 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" podStartSLOduration=129.152806432 podStartE2EDuration="2m9.152806432s" podCreationTimestamp="2025-10-13 13:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:54.150757278 +0000 UTC m=+148.718141348" watchObservedRunningTime="2025-10-13 13:09:54.152806432 +0000 UTC m=+148.720190492" Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.180176 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.207277 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.208003 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:54 crc kubenswrapper[4684]: E1013 13:09:54.208615 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:54.70860148 +0000 UTC m=+149.275985550 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.208872 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.228919 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-j679w" podStartSLOduration=128.22888686 podStartE2EDuration="2m8.22888686s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:54.223773571 +0000 UTC m=+148.791157641" watchObservedRunningTime="2025-10-13 13:09:54.22888686 +0000 UTC m=+148.796270920" Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.327144 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:54 crc kubenswrapper[4684]: E1013 13:09:54.327341 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:54.827314985 +0000 UTC m=+149.394699055 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.327764 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:54 crc kubenswrapper[4684]: E1013 13:09:54.328085 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:54.828071529 +0000 UTC m=+149.395455599 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.380260 4684 patch_prober.go:28] interesting pod/router-default-5444994796-mzb8w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 13 13:09:54 crc kubenswrapper[4684]: [-]has-synced failed: reason withheld Oct 13 13:09:54 crc kubenswrapper[4684]: [+]process-running ok Oct 13 13:09:54 crc kubenswrapper[4684]: healthz check failed Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.380306 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mzb8w" podUID="1dd0712f-3d77-4805-ba06-d1665699b2b9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.431427 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:54 crc kubenswrapper[4684]: E1013 13:09:54.431701 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:54.931688055 +0000 UTC m=+149.499072125 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.534798 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:54 crc kubenswrapper[4684]: E1013 13:09:54.535111 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:55.035098225 +0000 UTC m=+149.602482295 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.635705 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:54 crc kubenswrapper[4684]: E1013 13:09:54.636422 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:55.136407419 +0000 UTC m=+149.703791489 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.738657 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:54 crc kubenswrapper[4684]: E1013 13:09:54.739096 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:55.239069125 +0000 UTC m=+149.806453205 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.811144 4684 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-f8l8c container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:5443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.811211 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" podUID="8da81e59-aba2-4348-a96c-c1d1e5a4d7a0" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.22:5443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.843342 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:54 crc kubenswrapper[4684]: E1013 13:09:54.843712 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:55.343696303 +0000 UTC m=+149.911080373 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.862972 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" event={"ID":"0962ce13-6826-4e10-9102-aaaa537b2f12","Type":"ContainerStarted","Data":"63d1634748201049c8fe99634d0edc9612f193c58cd992108d9045704b582de6"} Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.917786 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" Oct 13 13:09:54 crc kubenswrapper[4684]: I1013 13:09:54.945500 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:54 crc kubenswrapper[4684]: E1013 13:09:54.950071 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:55.450055564 +0000 UTC m=+150.017439634 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.050811 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:55 crc kubenswrapper[4684]: E1013 13:09:55.051167 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:55.551118231 +0000 UTC m=+150.118502301 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.051429 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:55 crc kubenswrapper[4684]: E1013 13:09:55.051738 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:55.551721609 +0000 UTC m=+150.119105669 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.152683 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:55 crc kubenswrapper[4684]: E1013 13:09:55.153441 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:55.653415616 +0000 UTC m=+150.220799696 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.154729 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xqrpk" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.256155 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:55 crc kubenswrapper[4684]: E1013 13:09:55.256485 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:55.756472564 +0000 UTC m=+150.323856624 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.358593 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:55 crc kubenswrapper[4684]: E1013 13:09:55.359308 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:55.859289936 +0000 UTC m=+150.426674006 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.370075 4684 patch_prober.go:28] interesting pod/router-default-5444994796-mzb8w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 13 13:09:55 crc kubenswrapper[4684]: [-]has-synced failed: reason withheld Oct 13 13:09:55 crc kubenswrapper[4684]: [+]process-running ok Oct 13 13:09:55 crc kubenswrapper[4684]: healthz check failed Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.370132 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mzb8w" podUID="1dd0712f-3d77-4805-ba06-d1665699b2b9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.461838 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:55 crc kubenswrapper[4684]: E1013 13:09:55.462183 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:55.962168419 +0000 UTC m=+150.529552489 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.565891 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:55 crc kubenswrapper[4684]: E1013 13:09:55.566004 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:56.065981461 +0000 UTC m=+150.633365531 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.566087 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:55 crc kubenswrapper[4684]: E1013 13:09:55.566380 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:56.066372513 +0000 UTC m=+150.633756583 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.667312 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:55 crc kubenswrapper[4684]: E1013 13:09:55.668381 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:56.168355278 +0000 UTC m=+150.735739348 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.737738 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-726x4"] Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.739360 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-726x4" Oct 13 13:09:55 crc kubenswrapper[4684]: W1013 13:09:55.761143 4684 reflector.go:561] object-"openshift-marketplace"/"community-operators-dockercfg-dmngl": failed to list *v1.Secret: secrets "community-operators-dockercfg-dmngl" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-marketplace": no relationship found between node 'crc' and this object Oct 13 13:09:55 crc kubenswrapper[4684]: E1013 13:09:55.761195 4684 reflector.go:158] "Unhandled Error" err="object-\"openshift-marketplace\"/\"community-operators-dockercfg-dmngl\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"community-operators-dockercfg-dmngl\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-marketplace\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.769791 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzgnv\" (UniqueName: \"kubernetes.io/projected/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-kube-api-access-wzgnv\") pod \"community-operators-726x4\" (UID: \"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8\") " pod="openshift-marketplace/community-operators-726x4" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.769873 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-utilities\") pod \"community-operators-726x4\" (UID: \"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8\") " pod="openshift-marketplace/community-operators-726x4" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.769925 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-catalog-content\") pod \"community-operators-726x4\" (UID: \"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8\") " pod="openshift-marketplace/community-operators-726x4" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.769982 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:55 crc kubenswrapper[4684]: E1013 13:09:55.770279 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:56.270265861 +0000 UTC m=+150.837649921 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.780322 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-726x4"] Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.787982 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-f8l8c" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.870130 4684 generic.go:334] "Generic (PLEG): container finished" podID="6a4e53b7-9ccc-4cf5-9359-7be4899e1311" containerID="50f34ee3e95f5ea7544d0cd5c20d73c6b8bedcf8da98054dd3d141b38c1cda0e" exitCode=0 Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.870204 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" event={"ID":"6a4e53b7-9ccc-4cf5-9359-7be4899e1311","Type":"ContainerDied","Data":"50f34ee3e95f5ea7544d0cd5c20d73c6b8bedcf8da98054dd3d141b38c1cda0e"} Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.871008 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:55 crc kubenswrapper[4684]: E1013 13:09:55.871144 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:56.371117451 +0000 UTC m=+150.938501521 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.871230 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-catalog-content\") pod \"community-operators-726x4\" (UID: \"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8\") " pod="openshift-marketplace/community-operators-726x4" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.871311 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.871367 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzgnv\" (UniqueName: \"kubernetes.io/projected/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-kube-api-access-wzgnv\") pod \"community-operators-726x4\" (UID: \"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8\") " pod="openshift-marketplace/community-operators-726x4" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.871419 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-utilities\") pod \"community-operators-726x4\" (UID: \"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8\") " pod="openshift-marketplace/community-operators-726x4" Oct 13 13:09:55 crc kubenswrapper[4684]: E1013 13:09:55.871674 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:56.371667279 +0000 UTC m=+150.939051349 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.871985 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-utilities\") pod \"community-operators-726x4\" (UID: \"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8\") " pod="openshift-marketplace/community-operators-726x4" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.872094 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-catalog-content\") pod \"community-operators-726x4\" (UID: \"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8\") " pod="openshift-marketplace/community-operators-726x4" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.872998 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" event={"ID":"0962ce13-6826-4e10-9102-aaaa537b2f12","Type":"ContainerStarted","Data":"dc4b96aa46f4d14c99d42427ee1672e6bbb25d0579d1666a5c7cfa3bdc6fc162"} Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.873026 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" event={"ID":"0962ce13-6826-4e10-9102-aaaa537b2f12","Type":"ContainerStarted","Data":"c4fb175ea4f77d38817162bf49177b8e11b91a0de0750c7930066127a37d9892"} Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.875215 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"05680f0c84dcda4871073111e7f7eabdad372cf22e0b05408ce530ba359db04c"} Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.875244 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"d1b5fc026109891be2866336cfa6b38966ac067927f403b4ae04b600dc6945fa"} Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.878428 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"41de2f4d9e2580cc92bd84cb9cc5af51186e5cbf6acf3dcab753df3058f221dc"} Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.878489 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"fa3df313be801d0348bb69507fff985d537d22653dfe75c1c8d5fb6d57a29835"} Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.879893 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"664d13b3fe7fbfc50e5cb65515d3cc1d0a79ca326e769854ea0d5ade0152fdb3"} Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.887268 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mk5s8"] Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.888179 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.892719 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.930321 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzgnv\" (UniqueName: \"kubernetes.io/projected/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-kube-api-access-wzgnv\") pod \"community-operators-726x4\" (UID: \"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8\") " pod="openshift-marketplace/community-operators-726x4" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.938024 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mk5s8"] Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.972882 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.973451 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-utilities\") pod \"certified-operators-mk5s8\" (UID: \"607faaf0-b3f3-4ef3-978f-ad99d464f0bf\") " pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.973561 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwttn\" (UniqueName: \"kubernetes.io/projected/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-kube-api-access-mwttn\") pod \"certified-operators-mk5s8\" (UID: \"607faaf0-b3f3-4ef3-978f-ad99d464f0bf\") " pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:09:55 crc kubenswrapper[4684]: I1013 13:09:55.973873 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-catalog-content\") pod \"certified-operators-mk5s8\" (UID: \"607faaf0-b3f3-4ef3-978f-ad99d464f0bf\") " pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:09:55 crc kubenswrapper[4684]: E1013 13:09:55.974714 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:56.474685976 +0000 UTC m=+151.042070046 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.034865 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-kc5f6" podStartSLOduration=10.034850018 podStartE2EDuration="10.034850018s" podCreationTimestamp="2025-10-13 13:09:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:56.033934571 +0000 UTC m=+150.601318641" watchObservedRunningTime="2025-10-13 13:09:56.034850018 +0000 UTC m=+150.602234088" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.070730 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qx8pk"] Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.072182 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.075392 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-catalog-content\") pod \"certified-operators-mk5s8\" (UID: \"607faaf0-b3f3-4ef3-978f-ad99d464f0bf\") " pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.075458 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-utilities\") pod \"certified-operators-mk5s8\" (UID: \"607faaf0-b3f3-4ef3-978f-ad99d464f0bf\") " pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.075488 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwttn\" (UniqueName: \"kubernetes.io/projected/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-kube-api-access-mwttn\") pod \"certified-operators-mk5s8\" (UID: \"607faaf0-b3f3-4ef3-978f-ad99d464f0bf\") " pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.075523 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:56 crc kubenswrapper[4684]: E1013 13:09:56.075866 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:56.575850326 +0000 UTC m=+151.143234396 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.075940 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-utilities\") pod \"certified-operators-mk5s8\" (UID: \"607faaf0-b3f3-4ef3-978f-ad99d464f0bf\") " pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.075941 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-catalog-content\") pod \"certified-operators-mk5s8\" (UID: \"607faaf0-b3f3-4ef3-978f-ad99d464f0bf\") " pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.090673 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qx8pk"] Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.107710 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwttn\" (UniqueName: \"kubernetes.io/projected/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-kube-api-access-mwttn\") pod \"certified-operators-mk5s8\" (UID: \"607faaf0-b3f3-4ef3-978f-ad99d464f0bf\") " pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.176326 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:56 crc kubenswrapper[4684]: E1013 13:09:56.176525 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:56.676500649 +0000 UTC m=+151.243884719 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.177655 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-catalog-content\") pod \"community-operators-qx8pk\" (UID: \"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5\") " pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.177873 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.178022 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cfnt\" (UniqueName: \"kubernetes.io/projected/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-kube-api-access-5cfnt\") pod \"community-operators-qx8pk\" (UID: \"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5\") " pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.178166 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-utilities\") pod \"community-operators-qx8pk\" (UID: \"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5\") " pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:09:56 crc kubenswrapper[4684]: E1013 13:09:56.178231 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:56.678218022 +0000 UTC m=+151.245602092 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.220797 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.225451 4684 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.266004 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5qskm"] Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.267027 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.280526 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.280734 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-catalog-content\") pod \"community-operators-qx8pk\" (UID: \"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5\") " pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.280803 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cfnt\" (UniqueName: \"kubernetes.io/projected/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-kube-api-access-5cfnt\") pod \"community-operators-qx8pk\" (UID: \"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5\") " pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.280836 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-utilities\") pod \"community-operators-qx8pk\" (UID: \"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5\") " pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.281277 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-utilities\") pod \"community-operators-qx8pk\" (UID: \"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5\") " pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:09:56 crc kubenswrapper[4684]: E1013 13:09:56.281682 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:56.781643373 +0000 UTC m=+151.349027443 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.282519 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-catalog-content\") pod \"community-operators-qx8pk\" (UID: \"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5\") " pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.286152 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5qskm"] Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.319165 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cfnt\" (UniqueName: \"kubernetes.io/projected/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-kube-api-access-5cfnt\") pod \"community-operators-qx8pk\" (UID: \"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5\") " pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.366731 4684 patch_prober.go:28] interesting pod/router-default-5444994796-mzb8w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 13 13:09:56 crc kubenswrapper[4684]: [-]has-synced failed: reason withheld Oct 13 13:09:56 crc kubenswrapper[4684]: [+]process-running ok Oct 13 13:09:56 crc kubenswrapper[4684]: healthz check failed Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.366790 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mzb8w" podUID="1dd0712f-3d77-4805-ba06-d1665699b2b9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.383964 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj7ct\" (UniqueName: \"kubernetes.io/projected/65a9f2fe-16b9-4384-b620-227bbf5ed46c-kube-api-access-rj7ct\") pod \"certified-operators-5qskm\" (UID: \"65a9f2fe-16b9-4384-b620-227bbf5ed46c\") " pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.384004 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65a9f2fe-16b9-4384-b620-227bbf5ed46c-utilities\") pod \"certified-operators-5qskm\" (UID: \"65a9f2fe-16b9-4384-b620-227bbf5ed46c\") " pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.384058 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:56 crc 
kubenswrapper[4684]: I1013 13:09:56.384103 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65a9f2fe-16b9-4384-b620-227bbf5ed46c-catalog-content\") pod \"certified-operators-5qskm\" (UID: \"65a9f2fe-16b9-4384-b620-227bbf5ed46c\") " pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:09:56 crc kubenswrapper[4684]: E1013 13:09:56.384396 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 13:09:56.884383991 +0000 UTC m=+151.451768061 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-qm5mj" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.486521 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.486983 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj7ct\" (UniqueName: \"kubernetes.io/projected/65a9f2fe-16b9-4384-b620-227bbf5ed46c-kube-api-access-rj7ct\") pod \"certified-operators-5qskm\" (UID: \"65a9f2fe-16b9-4384-b620-227bbf5ed46c\") " pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.487009 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65a9f2fe-16b9-4384-b620-227bbf5ed46c-utilities\") pod \"certified-operators-5qskm\" (UID: \"65a9f2fe-16b9-4384-b620-227bbf5ed46c\") " pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:09:56 crc kubenswrapper[4684]: E1013 13:09:56.487122 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 13:09:56.98709508 +0000 UTC m=+151.554479150 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.487281 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65a9f2fe-16b9-4384-b620-227bbf5ed46c-catalog-content\") pod \"certified-operators-5qskm\" (UID: \"65a9f2fe-16b9-4384-b620-227bbf5ed46c\") " pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.487432 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65a9f2fe-16b9-4384-b620-227bbf5ed46c-utilities\") pod \"certified-operators-5qskm\" (UID: \"65a9f2fe-16b9-4384-b620-227bbf5ed46c\") " pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.487666 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65a9f2fe-16b9-4384-b620-227bbf5ed46c-catalog-content\") pod \"certified-operators-5qskm\" (UID: \"65a9f2fe-16b9-4384-b620-227bbf5ed46c\") " pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.517217 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj7ct\" (UniqueName: \"kubernetes.io/projected/65a9f2fe-16b9-4384-b620-227bbf5ed46c-kube-api-access-rj7ct\") pod \"certified-operators-5qskm\" (UID: \"65a9f2fe-16b9-4384-b620-227bbf5ed46c\") " pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.559064 4684 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-13T13:09:56.225472564Z","Handler":null,"Name":""} Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.560665 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mk5s8"] Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.568984 4684 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.569066 4684 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.571551 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.575983 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.576645 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-726x4" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.590836 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.594266 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.599178 4684 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.599241 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.633975 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.634678 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.638452 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.638494 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.681932 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.697668 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dd04eb90-3b87-4a7d-8ba8-290075bace80-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"dd04eb90-3b87-4a7d-8ba8-290075bace80\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.697736 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dd04eb90-3b87-4a7d-8ba8-290075bace80-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"dd04eb90-3b87-4a7d-8ba8-290075bace80\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.723250 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-qm5mj\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") " pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.742970 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.743599 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.747179 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.747434 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.755023 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.783187 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.801021 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.801860 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dd04eb90-3b87-4a7d-8ba8-290075bace80-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"dd04eb90-3b87-4a7d-8ba8-290075bace80\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.801934 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4e0c11f7-9c98-4c41-8246-6cdd68a23172-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4e0c11f7-9c98-4c41-8246-6cdd68a23172\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.801999 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4e0c11f7-9c98-4c41-8246-6cdd68a23172-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4e0c11f7-9c98-4c41-8246-6cdd68a23172\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.802044 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dd04eb90-3b87-4a7d-8ba8-290075bace80-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"dd04eb90-3b87-4a7d-8ba8-290075bace80\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.803213 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dd04eb90-3b87-4a7d-8ba8-290075bace80-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"dd04eb90-3b87-4a7d-8ba8-290075bace80\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.808911 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.863171 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dd04eb90-3b87-4a7d-8ba8-290075bace80-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"dd04eb90-3b87-4a7d-8ba8-290075bace80\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.888988 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"c6da3ee0c7f2f37702d7bb9fff6366b8da6852f086513cc4cee6e201a1b50324"} Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.889309 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.892064 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mk5s8" event={"ID":"607faaf0-b3f3-4ef3-978f-ad99d464f0bf","Type":"ContainerStarted","Data":"0d7af60cac97a178a1d5b1053c284a26f25ac2d1eb730555fb3cb91b680a09cd"} Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.892088 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mk5s8" event={"ID":"607faaf0-b3f3-4ef3-978f-ad99d464f0bf","Type":"ContainerStarted","Data":"29dcff4846b3c3f70b6ec7eedbae7bc09875d7f529824c3e667a05fb548eef3f"} Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.893684 4684 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.903295 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4e0c11f7-9c98-4c41-8246-6cdd68a23172-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4e0c11f7-9c98-4c41-8246-6cdd68a23172\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.903343 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4e0c11f7-9c98-4c41-8246-6cdd68a23172-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4e0c11f7-9c98-4c41-8246-6cdd68a23172\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.903448 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4e0c11f7-9c98-4c41-8246-6cdd68a23172-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4e0c11f7-9c98-4c41-8246-6cdd68a23172\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.933743 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4e0c11f7-9c98-4c41-8246-6cdd68a23172-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4e0c11f7-9c98-4c41-8246-6cdd68a23172\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 13:09:56 crc kubenswrapper[4684]: I1013 13:09:56.956286 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.064358 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.127754 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qm5mj"] Oct 13 13:09:57 crc kubenswrapper[4684]: W1013 13:09:57.142245 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7af13a9_fc75_4bdb_931f_b68bb3813c09.slice/crio-a8bda9a57e13b8a66b6b1339556ec090a9d4b80ba80b24e6fd3149290b47c76b WatchSource:0}: Error finding container a8bda9a57e13b8a66b6b1339556ec090a9d4b80ba80b24e6fd3149290b47c76b: Status 404 returned error can't find the container with id a8bda9a57e13b8a66b6b1339556ec090a9d4b80ba80b24e6fd3149290b47c76b Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.173604 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.217190 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qx8pk"] Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.223247 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-726x4"] Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.254706 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.258291 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5qskm"] Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.312246 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9kn7r\" (UniqueName: \"kubernetes.io/projected/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-kube-api-access-9kn7r\") pod \"6a4e53b7-9ccc-4cf5-9359-7be4899e1311\" (UID: \"6a4e53b7-9ccc-4cf5-9359-7be4899e1311\") " Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.312363 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-secret-volume\") pod \"6a4e53b7-9ccc-4cf5-9359-7be4899e1311\" (UID: \"6a4e53b7-9ccc-4cf5-9359-7be4899e1311\") " Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.312413 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-config-volume\") pod \"6a4e53b7-9ccc-4cf5-9359-7be4899e1311\" (UID: \"6a4e53b7-9ccc-4cf5-9359-7be4899e1311\") " Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.313793 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-config-volume" (OuterVolumeSpecName: "config-volume") pod "6a4e53b7-9ccc-4cf5-9359-7be4899e1311" (UID: "6a4e53b7-9ccc-4cf5-9359-7be4899e1311"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.325477 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-kube-api-access-9kn7r" (OuterVolumeSpecName: "kube-api-access-9kn7r") pod "6a4e53b7-9ccc-4cf5-9359-7be4899e1311" (UID: "6a4e53b7-9ccc-4cf5-9359-7be4899e1311"). InnerVolumeSpecName "kube-api-access-9kn7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.330844 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6a4e53b7-9ccc-4cf5-9359-7be4899e1311" (UID: "6a4e53b7-9ccc-4cf5-9359-7be4899e1311"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.356579 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.369322 4684 patch_prober.go:28] interesting pod/router-default-5444994796-mzb8w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 13 13:09:57 crc kubenswrapper[4684]: [-]has-synced failed: reason withheld Oct 13 13:09:57 crc kubenswrapper[4684]: [+]process-running ok Oct 13 13:09:57 crc kubenswrapper[4684]: healthz check failed Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.369378 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mzb8w" podUID="1dd0712f-3d77-4805-ba06-d1665699b2b9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.413702 4684 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-config-volume\") on node \"crc\" DevicePath \"\"" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.413735 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9kn7r\" (UniqueName: \"kubernetes.io/projected/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-kube-api-access-9kn7r\") on node \"crc\" DevicePath \"\"" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.413752 4684 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6a4e53b7-9ccc-4cf5-9359-7be4899e1311-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.654695 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8lm6p"] Oct 13 13:09:57 crc kubenswrapper[4684]: E1013 13:09:57.655223 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a4e53b7-9ccc-4cf5-9359-7be4899e1311" containerName="collect-profiles" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.655236 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a4e53b7-9ccc-4cf5-9359-7be4899e1311" containerName="collect-profiles" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.655352 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a4e53b7-9ccc-4cf5-9359-7be4899e1311" containerName="collect-profiles" Oct 13 
13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.675369 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8lm6p"] Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.675548 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.693408 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.822022 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0943d5-e534-4e02-bda6-bf77c5c39882-utilities\") pod \"redhat-marketplace-8lm6p\" (UID: \"4d0943d5-e534-4e02-bda6-bf77c5c39882\") " pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.822142 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0943d5-e534-4e02-bda6-bf77c5c39882-catalog-content\") pod \"redhat-marketplace-8lm6p\" (UID: \"4d0943d5-e534-4e02-bda6-bf77c5c39882\") " pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.822173 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g46s7\" (UniqueName: \"kubernetes.io/projected/4d0943d5-e534-4e02-bda6-bf77c5c39882-kube-api-access-g46s7\") pod \"redhat-marketplace-8lm6p\" (UID: \"4d0943d5-e534-4e02-bda6-bf77c5c39882\") " pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.903251 4684 generic.go:334] "Generic (PLEG): container finished" podID="c6df7e75-1be9-4d4a-a6ac-af5d83e580d5" containerID="dfa26532f2a2ab76887234c50f6b1919fa268945dfc382708cecc938ff45c5a9" exitCode=0 Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.903467 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qx8pk" event={"ID":"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5","Type":"ContainerDied","Data":"dfa26532f2a2ab76887234c50f6b1919fa268945dfc382708cecc938ff45c5a9"} Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.903574 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qx8pk" event={"ID":"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5","Type":"ContainerStarted","Data":"77afe3e88c4a478d1179e34dd10f460d5090458edf03cb3170125553918db01d"} Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.905995 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"dd04eb90-3b87-4a7d-8ba8-290075bace80","Type":"ContainerStarted","Data":"b550a2b4286cf28c434d5df0b69194cc006413c2cc675ccef23d9a83929dba30"} Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.906026 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"dd04eb90-3b87-4a7d-8ba8-290075bace80","Type":"ContainerStarted","Data":"271dcb7079831b8ad6ca630d0586a95584da04357de25ec55ecf3a4db535492a"} Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.909574 4684 generic.go:334] "Generic (PLEG): container finished" podID="607faaf0-b3f3-4ef3-978f-ad99d464f0bf" 
containerID="0d7af60cac97a178a1d5b1053c284a26f25ac2d1eb730555fb3cb91b680a09cd" exitCode=0 Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.909690 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mk5s8" event={"ID":"607faaf0-b3f3-4ef3-978f-ad99d464f0bf","Type":"ContainerDied","Data":"0d7af60cac97a178a1d5b1053c284a26f25ac2d1eb730555fb3cb91b680a09cd"} Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.913688 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" event={"ID":"e7af13a9-fc75-4bdb-931f-b68bb3813c09","Type":"ContainerStarted","Data":"a90ddd1423f2b09515587b9e0892bd863d55e5aa23598b1e0644fe74b1bc0924"} Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.913732 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" event={"ID":"e7af13a9-fc75-4bdb-931f-b68bb3813c09","Type":"ContainerStarted","Data":"a8bda9a57e13b8a66b6b1339556ec090a9d4b80ba80b24e6fd3149290b47c76b"} Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.914059 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.923919 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g46s7\" (UniqueName: \"kubernetes.io/projected/4d0943d5-e534-4e02-bda6-bf77c5c39882-kube-api-access-g46s7\") pod \"redhat-marketplace-8lm6p\" (UID: \"4d0943d5-e534-4e02-bda6-bf77c5c39882\") " pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.923996 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0943d5-e534-4e02-bda6-bf77c5c39882-utilities\") pod \"redhat-marketplace-8lm6p\" (UID: \"4d0943d5-e534-4e02-bda6-bf77c5c39882\") " pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.924076 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0943d5-e534-4e02-bda6-bf77c5c39882-catalog-content\") pod \"redhat-marketplace-8lm6p\" (UID: \"4d0943d5-e534-4e02-bda6-bf77c5c39882\") " pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.925222 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0943d5-e534-4e02-bda6-bf77c5c39882-catalog-content\") pod \"redhat-marketplace-8lm6p\" (UID: \"4d0943d5-e534-4e02-bda6-bf77c5c39882\") " pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.925974 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0943d5-e534-4e02-bda6-bf77c5c39882-utilities\") pod \"redhat-marketplace-8lm6p\" (UID: \"4d0943d5-e534-4e02-bda6-bf77c5c39882\") " pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.933291 4684 generic.go:334] "Generic (PLEG): container finished" podID="a0f0adcd-9aa4-4c97-8a9c-72a654db14f8" containerID="2bd12aabff5c5ccbee1c3517581729c99fcdfb23db9e46a80956916ed1230650" exitCode=0 Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.933414 4684 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-726x4" event={"ID":"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8","Type":"ContainerDied","Data":"2bd12aabff5c5ccbee1c3517581729c99fcdfb23db9e46a80956916ed1230650"} Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.933452 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-726x4" event={"ID":"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8","Type":"ContainerStarted","Data":"f96b3c703eb4db85854b9f4577ac4be74cd2baff652cf4afe52d2c2ff36e68e3"} Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.938964 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" event={"ID":"6a4e53b7-9ccc-4cf5-9359-7be4899e1311","Type":"ContainerDied","Data":"422948fd20a5b237cdd7048a1902144d71c584ae19616ea93f5eb375f57e2e4f"} Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.939002 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="422948fd20a5b237cdd7048a1902144d71c584ae19616ea93f5eb375f57e2e4f" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.939081 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.951704 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g46s7\" (UniqueName: \"kubernetes.io/projected/4d0943d5-e534-4e02-bda6-bf77c5c39882-kube-api-access-g46s7\") pod \"redhat-marketplace-8lm6p\" (UID: \"4d0943d5-e534-4e02-bda6-bf77c5c39882\") " pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.963494 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" podStartSLOduration=131.963467875 podStartE2EDuration="2m11.963467875s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:57.952475493 +0000 UTC m=+152.519859593" watchObservedRunningTime="2025-10-13 13:09:57.963467875 +0000 UTC m=+152.530851955" Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.989183 4684 generic.go:334] "Generic (PLEG): container finished" podID="65a9f2fe-16b9-4384-b620-227bbf5ed46c" containerID="df43878c38d23e50da55eea172eab25cb6c8f6394a1ae1edb79d0da302ffa6c7" exitCode=0 Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.989307 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5qskm" event={"ID":"65a9f2fe-16b9-4384-b620-227bbf5ed46c","Type":"ContainerDied","Data":"df43878c38d23e50da55eea172eab25cb6c8f6394a1ae1edb79d0da302ffa6c7"} Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.989364 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5qskm" event={"ID":"65a9f2fe-16b9-4384-b620-227bbf5ed46c","Type":"ContainerStarted","Data":"5a1185d18e8acea1c65cc9bccf6480852913481f7d3a3b73b31f4fbbbde09d8d"} Oct 13 13:09:57 crc kubenswrapper[4684]: I1013 13:09:57.996107 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=1.9960721110000001 podStartE2EDuration="1.996072111s" podCreationTimestamp="2025-10-13 13:09:56 
+0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:57.97196706 +0000 UTC m=+152.539351130" watchObservedRunningTime="2025-10-13 13:09:57.996072111 +0000 UTC m=+152.563456181" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.012436 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4e0c11f7-9c98-4c41-8246-6cdd68a23172","Type":"ContainerStarted","Data":"e0828e7616444c0cb2f2a2daff3009608057cf35323c26250358298a72783c08"} Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.012539 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4e0c11f7-9c98-4c41-8246-6cdd68a23172","Type":"ContainerStarted","Data":"1de4820b9b1eeec2b1fe3eb81086806f36a51a7e51086260010f122484ffea24"} Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.027836 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.064006 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xcncl"] Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.065545 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xcncl" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.067643 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.067626398 podStartE2EDuration="2.067626398s" podCreationTimestamp="2025-10-13 13:09:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:09:58.066093141 +0000 UTC m=+152.633477211" watchObservedRunningTime="2025-10-13 13:09:58.067626398 +0000 UTC m=+152.635010468" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.076641 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcncl"] Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.113977 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.130608 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f63222d6-7d3d-4b06-a32c-517ad8e72492-catalog-content\") pod \"redhat-marketplace-xcncl\" (UID: \"f63222d6-7d3d-4b06-a32c-517ad8e72492\") " pod="openshift-marketplace/redhat-marketplace-xcncl" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.130736 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f63222d6-7d3d-4b06-a32c-517ad8e72492-utilities\") pod \"redhat-marketplace-xcncl\" (UID: \"f63222d6-7d3d-4b06-a32c-517ad8e72492\") " pod="openshift-marketplace/redhat-marketplace-xcncl" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.132011 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9vfw\" (UniqueName: \"kubernetes.io/projected/f63222d6-7d3d-4b06-a32c-517ad8e72492-kube-api-access-z9vfw\") pod 
\"redhat-marketplace-xcncl\" (UID: \"f63222d6-7d3d-4b06-a32c-517ad8e72492\") " pod="openshift-marketplace/redhat-marketplace-xcncl" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.232942 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f63222d6-7d3d-4b06-a32c-517ad8e72492-catalog-content\") pod \"redhat-marketplace-xcncl\" (UID: \"f63222d6-7d3d-4b06-a32c-517ad8e72492\") " pod="openshift-marketplace/redhat-marketplace-xcncl" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.233017 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f63222d6-7d3d-4b06-a32c-517ad8e72492-utilities\") pod \"redhat-marketplace-xcncl\" (UID: \"f63222d6-7d3d-4b06-a32c-517ad8e72492\") " pod="openshift-marketplace/redhat-marketplace-xcncl" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.233068 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9vfw\" (UniqueName: \"kubernetes.io/projected/f63222d6-7d3d-4b06-a32c-517ad8e72492-kube-api-access-z9vfw\") pod \"redhat-marketplace-xcncl\" (UID: \"f63222d6-7d3d-4b06-a32c-517ad8e72492\") " pod="openshift-marketplace/redhat-marketplace-xcncl" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.233660 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f63222d6-7d3d-4b06-a32c-517ad8e72492-utilities\") pod \"redhat-marketplace-xcncl\" (UID: \"f63222d6-7d3d-4b06-a32c-517ad8e72492\") " pod="openshift-marketplace/redhat-marketplace-xcncl" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.234081 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f63222d6-7d3d-4b06-a32c-517ad8e72492-catalog-content\") pod \"redhat-marketplace-xcncl\" (UID: \"f63222d6-7d3d-4b06-a32c-517ad8e72492\") " pod="openshift-marketplace/redhat-marketplace-xcncl" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.256089 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.257103 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.258065 4684 patch_prober.go:28] interesting pod/console-f9d7485db-8vg72 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.258108 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-8vg72" podUID="4697906b-fe4e-4a08-a82c-3a5fb0129fc9" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.260703 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9vfw\" (UniqueName: \"kubernetes.io/projected/f63222d6-7d3d-4b06-a32c-517ad8e72492-kube-api-access-z9vfw\") pod \"redhat-marketplace-xcncl\" (UID: \"f63222d6-7d3d-4b06-a32c-517ad8e72492\") " pod="openshift-marketplace/redhat-marketplace-xcncl" Oct 13 13:09:58 crc 
kubenswrapper[4684]: I1013 13:09:58.373130 4684 patch_prober.go:28] interesting pod/router-default-5444994796-mzb8w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 13 13:09:58 crc kubenswrapper[4684]: [-]has-synced failed: reason withheld Oct 13 13:09:58 crc kubenswrapper[4684]: [+]process-running ok Oct 13 13:09:58 crc kubenswrapper[4684]: healthz check failed Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.373462 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mzb8w" podUID="1dd0712f-3d77-4805-ba06-d1665699b2b9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.397018 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.397603 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8lm6p"] Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.397702 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.397734 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.405234 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xcncl" Oct 13 13:09:58 crc kubenswrapper[4684]: W1013 13:09:58.406938 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d0943d5_e534_4e02_bda6_bf77c5c39882.slice/crio-789e2f6b47cc90662b27f005d11b48763d7591c1e51667916a825423c0b7c10e WatchSource:0}: Error finding container 789e2f6b47cc90662b27f005d11b48763d7591c1e51667916a825423c0b7c10e: Status 404 returned error can't find the container with id 789e2f6b47cc90662b27f005d11b48763d7591c1e51667916a825423c0b7c10e Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.407048 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.590872 4684 patch_prober.go:28] interesting pod/downloads-7954f5f757-rzq52 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.591371 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rzq52" podUID="ba0003c9-5951-4b74-b146-38011315db63" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.591009 4684 patch_prober.go:28] interesting pod/downloads-7954f5f757-rzq52 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" 
start-of-body= Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.591744 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-rzq52" podUID="ba0003c9-5951-4b74-b146-38011315db63" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.700275 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcncl"] Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.856887 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zr84p"] Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.859212 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.862780 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.864836 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zr84p"] Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.957735 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84d0ce46-2872-4733-8d42-fd1e2680dff3-utilities\") pod \"redhat-operators-zr84p\" (UID: \"84d0ce46-2872-4733-8d42-fd1e2680dff3\") " pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.957809 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84d0ce46-2872-4733-8d42-fd1e2680dff3-catalog-content\") pod \"redhat-operators-zr84p\" (UID: \"84d0ce46-2872-4733-8d42-fd1e2680dff3\") " pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:09:58 crc kubenswrapper[4684]: I1013 13:09:58.957870 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x77mc\" (UniqueName: \"kubernetes.io/projected/84d0ce46-2872-4733-8d42-fd1e2680dff3-kube-api-access-x77mc\") pod \"redhat-operators-zr84p\" (UID: \"84d0ce46-2872-4733-8d42-fd1e2680dff3\") " pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.024573 4684 generic.go:334] "Generic (PLEG): container finished" podID="4d0943d5-e534-4e02-bda6-bf77c5c39882" containerID="2e64a6602d3aa8dd04cf7670783dabbd60461c0c7149a446ad84bad42409db88" exitCode=0 Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.024637 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8lm6p" event={"ID":"4d0943d5-e534-4e02-bda6-bf77c5c39882","Type":"ContainerDied","Data":"2e64a6602d3aa8dd04cf7670783dabbd60461c0c7149a446ad84bad42409db88"} Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.024662 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8lm6p" event={"ID":"4d0943d5-e534-4e02-bda6-bf77c5c39882","Type":"ContainerStarted","Data":"789e2f6b47cc90662b27f005d11b48763d7591c1e51667916a825423c0b7c10e"} Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.027366 4684 generic.go:334] "Generic (PLEG): container finished" 
podID="4e0c11f7-9c98-4c41-8246-6cdd68a23172" containerID="e0828e7616444c0cb2f2a2daff3009608057cf35323c26250358298a72783c08" exitCode=0 Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.027461 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4e0c11f7-9c98-4c41-8246-6cdd68a23172","Type":"ContainerDied","Data":"e0828e7616444c0cb2f2a2daff3009608057cf35323c26250358298a72783c08"} Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.033102 4684 generic.go:334] "Generic (PLEG): container finished" podID="dd04eb90-3b87-4a7d-8ba8-290075bace80" containerID="b550a2b4286cf28c434d5df0b69194cc006413c2cc675ccef23d9a83929dba30" exitCode=0 Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.033200 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"dd04eb90-3b87-4a7d-8ba8-290075bace80","Type":"ContainerDied","Data":"b550a2b4286cf28c434d5df0b69194cc006413c2cc675ccef23d9a83929dba30"} Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.035660 4684 generic.go:334] "Generic (PLEG): container finished" podID="f63222d6-7d3d-4b06-a32c-517ad8e72492" containerID="2b43c87648309e0dc39f8fa68955dc2811b456133141a368086baae600efd018" exitCode=0 Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.036363 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcncl" event={"ID":"f63222d6-7d3d-4b06-a32c-517ad8e72492","Type":"ContainerDied","Data":"2b43c87648309e0dc39f8fa68955dc2811b456133141a368086baae600efd018"} Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.036440 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcncl" event={"ID":"f63222d6-7d3d-4b06-a32c-517ad8e72492","Type":"ContainerStarted","Data":"d64abc6eda33b1d66d9a760121eba2d7ebc5bdb0d93008d6cc09f659441b0a95"} Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.045016 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-8xvn2" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.059031 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84d0ce46-2872-4733-8d42-fd1e2680dff3-catalog-content\") pod \"redhat-operators-zr84p\" (UID: \"84d0ce46-2872-4733-8d42-fd1e2680dff3\") " pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.059127 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x77mc\" (UniqueName: \"kubernetes.io/projected/84d0ce46-2872-4733-8d42-fd1e2680dff3-kube-api-access-x77mc\") pod \"redhat-operators-zr84p\" (UID: \"84d0ce46-2872-4733-8d42-fd1e2680dff3\") " pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.059210 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84d0ce46-2872-4733-8d42-fd1e2680dff3-utilities\") pod \"redhat-operators-zr84p\" (UID: \"84d0ce46-2872-4733-8d42-fd1e2680dff3\") " pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.059788 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84d0ce46-2872-4733-8d42-fd1e2680dff3-utilities\") pod 
\"redhat-operators-zr84p\" (UID: \"84d0ce46-2872-4733-8d42-fd1e2680dff3\") " pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.060098 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84d0ce46-2872-4733-8d42-fd1e2680dff3-catalog-content\") pod \"redhat-operators-zr84p\" (UID: \"84d0ce46-2872-4733-8d42-fd1e2680dff3\") " pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.088582 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x77mc\" (UniqueName: \"kubernetes.io/projected/84d0ce46-2872-4733-8d42-fd1e2680dff3-kube-api-access-x77mc\") pod \"redhat-operators-zr84p\" (UID: \"84d0ce46-2872-4733-8d42-fd1e2680dff3\") " pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.187066 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.254607 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2fb5s"] Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.256416 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2fb5s" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.274123 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2fb5s"] Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.368760 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1e522f6-423a-45a7-bb5c-0c26610768d8-catalog-content\") pod \"redhat-operators-2fb5s\" (UID: \"c1e522f6-423a-45a7-bb5c-0c26610768d8\") " pod="openshift-marketplace/redhat-operators-2fb5s" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.368801 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pccrd\" (UniqueName: \"kubernetes.io/projected/c1e522f6-423a-45a7-bb5c-0c26610768d8-kube-api-access-pccrd\") pod \"redhat-operators-2fb5s\" (UID: \"c1e522f6-423a-45a7-bb5c-0c26610768d8\") " pod="openshift-marketplace/redhat-operators-2fb5s" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.368862 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1e522f6-423a-45a7-bb5c-0c26610768d8-utilities\") pod \"redhat-operators-2fb5s\" (UID: \"c1e522f6-423a-45a7-bb5c-0c26610768d8\") " pod="openshift-marketplace/redhat-operators-2fb5s" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.369017 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.372414 4684 patch_prober.go:28] interesting pod/router-default-5444994796-mzb8w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 13 13:09:59 crc kubenswrapper[4684]: [-]has-synced failed: reason withheld Oct 13 13:09:59 crc kubenswrapper[4684]: [+]process-running ok Oct 13 13:09:59 crc kubenswrapper[4684]: healthz check 
failed Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.372464 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mzb8w" podUID="1dd0712f-3d77-4805-ba06-d1665699b2b9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.469983 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1e522f6-423a-45a7-bb5c-0c26610768d8-catalog-content\") pod \"redhat-operators-2fb5s\" (UID: \"c1e522f6-423a-45a7-bb5c-0c26610768d8\") " pod="openshift-marketplace/redhat-operators-2fb5s" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.470035 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pccrd\" (UniqueName: \"kubernetes.io/projected/c1e522f6-423a-45a7-bb5c-0c26610768d8-kube-api-access-pccrd\") pod \"redhat-operators-2fb5s\" (UID: \"c1e522f6-423a-45a7-bb5c-0c26610768d8\") " pod="openshift-marketplace/redhat-operators-2fb5s" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.470095 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1e522f6-423a-45a7-bb5c-0c26610768d8-utilities\") pod \"redhat-operators-2fb5s\" (UID: \"c1e522f6-423a-45a7-bb5c-0c26610768d8\") " pod="openshift-marketplace/redhat-operators-2fb5s" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.470665 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1e522f6-423a-45a7-bb5c-0c26610768d8-utilities\") pod \"redhat-operators-2fb5s\" (UID: \"c1e522f6-423a-45a7-bb5c-0c26610768d8\") " pod="openshift-marketplace/redhat-operators-2fb5s" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.470943 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1e522f6-423a-45a7-bb5c-0c26610768d8-catalog-content\") pod \"redhat-operators-2fb5s\" (UID: \"c1e522f6-423a-45a7-bb5c-0c26610768d8\") " pod="openshift-marketplace/redhat-operators-2fb5s" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.493221 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pccrd\" (UniqueName: \"kubernetes.io/projected/c1e522f6-423a-45a7-bb5c-0c26610768d8-kube-api-access-pccrd\") pod \"redhat-operators-2fb5s\" (UID: \"c1e522f6-423a-45a7-bb5c-0c26610768d8\") " pod="openshift-marketplace/redhat-operators-2fb5s" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.597706 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2fb5s" Oct 13 13:09:59 crc kubenswrapper[4684]: I1013 13:09:59.698528 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zr84p"] Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.080568 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr84p" event={"ID":"84d0ce46-2872-4733-8d42-fd1e2680dff3","Type":"ContainerStarted","Data":"9a508048f41f9348bc92393a534bd8df861c7feb87f2dbe00bda4babcb0bfc90"} Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.180483 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2fb5s"] Oct 13 13:10:00 crc kubenswrapper[4684]: W1013 13:10:00.196470 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1e522f6_423a_45a7_bb5c_0c26610768d8.slice/crio-a257a5659acd6ac17775441560d5ee4e94c4b239ce29df935e97ec70846b4912 WatchSource:0}: Error finding container a257a5659acd6ac17775441560d5ee4e94c4b239ce29df935e97ec70846b4912: Status 404 returned error can't find the container with id a257a5659acd6ac17775441560d5ee4e94c4b239ce29df935e97ec70846b4912 Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.374180 4684 patch_prober.go:28] interesting pod/router-default-5444994796-mzb8w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 13 13:10:00 crc kubenswrapper[4684]: [-]has-synced failed: reason withheld Oct 13 13:10:00 crc kubenswrapper[4684]: [+]process-running ok Oct 13 13:10:00 crc kubenswrapper[4684]: healthz check failed Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.374501 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mzb8w" podUID="1dd0712f-3d77-4805-ba06-d1665699b2b9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.555809 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.559720 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.559778 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.610932 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.617925 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dd04eb90-3b87-4a7d-8ba8-290075bace80-kubelet-dir\") pod \"dd04eb90-3b87-4a7d-8ba8-290075bace80\" (UID: \"dd04eb90-3b87-4a7d-8ba8-290075bace80\") " Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.618042 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dd04eb90-3b87-4a7d-8ba8-290075bace80-kube-api-access\") pod \"dd04eb90-3b87-4a7d-8ba8-290075bace80\" (UID: \"dd04eb90-3b87-4a7d-8ba8-290075bace80\") " Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.618123 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dd04eb90-3b87-4a7d-8ba8-290075bace80-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "dd04eb90-3b87-4a7d-8ba8-290075bace80" (UID: "dd04eb90-3b87-4a7d-8ba8-290075bace80"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.618802 4684 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dd04eb90-3b87-4a7d-8ba8-290075bace80-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.625716 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd04eb90-3b87-4a7d-8ba8-290075bace80-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "dd04eb90-3b87-4a7d-8ba8-290075bace80" (UID: "dd04eb90-3b87-4a7d-8ba8-290075bace80"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.719719 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4e0c11f7-9c98-4c41-8246-6cdd68a23172-kubelet-dir\") pod \"4e0c11f7-9c98-4c41-8246-6cdd68a23172\" (UID: \"4e0c11f7-9c98-4c41-8246-6cdd68a23172\") " Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.719850 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4e0c11f7-9c98-4c41-8246-6cdd68a23172-kube-api-access\") pod \"4e0c11f7-9c98-4c41-8246-6cdd68a23172\" (UID: \"4e0c11f7-9c98-4c41-8246-6cdd68a23172\") " Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.720085 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dd04eb90-3b87-4a7d-8ba8-290075bace80-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.720700 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e0c11f7-9c98-4c41-8246-6cdd68a23172-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "4e0c11f7-9c98-4c41-8246-6cdd68a23172" (UID: "4e0c11f7-9c98-4c41-8246-6cdd68a23172"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.729760 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e0c11f7-9c98-4c41-8246-6cdd68a23172-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "4e0c11f7-9c98-4c41-8246-6cdd68a23172" (UID: "4e0c11f7-9c98-4c41-8246-6cdd68a23172"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.822968 4684 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4e0c11f7-9c98-4c41-8246-6cdd68a23172-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 13 13:10:00 crc kubenswrapper[4684]: I1013 13:10:00.823012 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4e0c11f7-9c98-4c41-8246-6cdd68a23172-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 13 13:10:01 crc kubenswrapper[4684]: I1013 13:10:01.136053 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"dd04eb90-3b87-4a7d-8ba8-290075bace80","Type":"ContainerDied","Data":"271dcb7079831b8ad6ca630d0586a95584da04357de25ec55ecf3a4db535492a"} Oct 13 13:10:01 crc kubenswrapper[4684]: I1013 13:10:01.136113 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="271dcb7079831b8ad6ca630d0586a95584da04357de25ec55ecf3a4db535492a" Oct 13 13:10:01 crc kubenswrapper[4684]: I1013 13:10:01.136111 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 13:10:01 crc kubenswrapper[4684]: I1013 13:10:01.140522 4684 generic.go:334] "Generic (PLEG): container finished" podID="c1e522f6-423a-45a7-bb5c-0c26610768d8" containerID="387e1d4e6c1ada447414368e70c1ce8a5b9ca0623a079f8bd68b645c6f5f59dd" exitCode=0 Oct 13 13:10:01 crc kubenswrapper[4684]: I1013 13:10:01.140615 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2fb5s" event={"ID":"c1e522f6-423a-45a7-bb5c-0c26610768d8","Type":"ContainerDied","Data":"387e1d4e6c1ada447414368e70c1ce8a5b9ca0623a079f8bd68b645c6f5f59dd"} Oct 13 13:10:01 crc kubenswrapper[4684]: I1013 13:10:01.140652 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2fb5s" event={"ID":"c1e522f6-423a-45a7-bb5c-0c26610768d8","Type":"ContainerStarted","Data":"a257a5659acd6ac17775441560d5ee4e94c4b239ce29df935e97ec70846b4912"} Oct 13 13:10:01 crc kubenswrapper[4684]: I1013 13:10:01.173590 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4e0c11f7-9c98-4c41-8246-6cdd68a23172","Type":"ContainerDied","Data":"1de4820b9b1eeec2b1fe3eb81086806f36a51a7e51086260010f122484ffea24"} Oct 13 13:10:01 crc kubenswrapper[4684]: I1013 13:10:01.174124 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1de4820b9b1eeec2b1fe3eb81086806f36a51a7e51086260010f122484ffea24" Oct 13 13:10:01 crc kubenswrapper[4684]: I1013 13:10:01.173589 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 13:10:01 crc kubenswrapper[4684]: I1013 13:10:01.181499 4684 generic.go:334] "Generic (PLEG): container finished" podID="84d0ce46-2872-4733-8d42-fd1e2680dff3" containerID="363d4e3c54dd6e76052c55f35708cae1aa09b71df83b718dea7acbfa43dbc374" exitCode=0 Oct 13 13:10:01 crc kubenswrapper[4684]: I1013 13:10:01.181551 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr84p" event={"ID":"84d0ce46-2872-4733-8d42-fd1e2680dff3","Type":"ContainerDied","Data":"363d4e3c54dd6e76052c55f35708cae1aa09b71df83b718dea7acbfa43dbc374"} Oct 13 13:10:01 crc kubenswrapper[4684]: I1013 13:10:01.365652 4684 patch_prober.go:28] interesting pod/router-default-5444994796-mzb8w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 13 13:10:01 crc kubenswrapper[4684]: [-]has-synced failed: reason withheld Oct 13 13:10:01 crc kubenswrapper[4684]: [+]process-running ok Oct 13 13:10:01 crc kubenswrapper[4684]: healthz check failed Oct 13 13:10:01 crc kubenswrapper[4684]: I1013 13:10:01.365710 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mzb8w" podUID="1dd0712f-3d77-4805-ba06-d1665699b2b9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 13 13:10:01 crc kubenswrapper[4684]: I1013 13:10:01.459627 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-lk6sg" Oct 13 13:10:02 crc kubenswrapper[4684]: I1013 13:10:02.366003 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:10:02 crc kubenswrapper[4684]: I1013 13:10:02.370330 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-mzb8w" Oct 13 13:10:08 crc kubenswrapper[4684]: I1013 13:10:08.252846 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:10:08 crc kubenswrapper[4684]: I1013 13:10:08.257842 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-8vg72" Oct 13 13:10:08 crc kubenswrapper[4684]: I1013 13:10:08.360613 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs\") pod \"network-metrics-daemon-mlkgd\" (UID: \"9150445c-49fc-46c8-b101-d672f0485cbb\") " pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:10:08 crc kubenswrapper[4684]: I1013 13:10:08.366684 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9150445c-49fc-46c8-b101-d672f0485cbb-metrics-certs\") pod \"network-metrics-daemon-mlkgd\" (UID: \"9150445c-49fc-46c8-b101-d672f0485cbb\") " pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:10:08 crc kubenswrapper[4684]: I1013 13:10:08.596344 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-rzq52" Oct 13 13:10:08 crc kubenswrapper[4684]: I1013 13:10:08.663688 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-mlkgd" Oct 13 13:10:11 crc kubenswrapper[4684]: I1013 13:10:11.475156 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-mlkgd"] Oct 13 13:10:11 crc kubenswrapper[4684]: W1013 13:10:11.481446 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9150445c_49fc_46c8_b101_d672f0485cbb.slice/crio-51ad5c556664de1513d894190198c1b54d94609917d928e94b87b4b0f9efd51e WatchSource:0}: Error finding container 51ad5c556664de1513d894190198c1b54d94609917d928e94b87b4b0f9efd51e: Status 404 returned error can't find the container with id 51ad5c556664de1513d894190198c1b54d94609917d928e94b87b4b0f9efd51e Oct 13 13:10:12 crc kubenswrapper[4684]: I1013 13:10:12.247390 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" event={"ID":"9150445c-49fc-46c8-b101-d672f0485cbb","Type":"ContainerStarted","Data":"9334beda411f0665419c984177f3a683fd80c638acb918982d91f1034a45c088"} Oct 13 13:10:12 crc kubenswrapper[4684]: I1013 13:10:12.247454 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" event={"ID":"9150445c-49fc-46c8-b101-d672f0485cbb","Type":"ContainerStarted","Data":"51ad5c556664de1513d894190198c1b54d94609917d928e94b87b4b0f9efd51e"} Oct 13 13:10:13 crc kubenswrapper[4684]: I1013 13:10:13.257050 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-mlkgd" event={"ID":"9150445c-49fc-46c8-b101-d672f0485cbb","Type":"ContainerStarted","Data":"56684836f392a878a3a8ed372152f308195b3fa8cc04d3455de5d98a9138828f"} Oct 13 13:10:16 crc kubenswrapper[4684]: I1013 13:10:16.796573 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" Oct 13 13:10:16 crc kubenswrapper[4684]: I1013 13:10:16.815311 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-mlkgd" podStartSLOduration=150.815287293 podStartE2EDuration="2m30.815287293s" podCreationTimestamp="2025-10-13 13:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:10:14.277262081 +0000 UTC m=+168.844646171" watchObservedRunningTime="2025-10-13 13:10:16.815287293 +0000 UTC m=+171.382671373" Oct 13 13:10:23 crc kubenswrapper[4684]: I1013 13:10:23.318550 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr84p" event={"ID":"84d0ce46-2872-4733-8d42-fd1e2680dff3","Type":"ContainerStarted","Data":"f446d6964b0ca17322c54d6c4cffa8bf52307805c2afecfa9096c2c18042ffbb"} Oct 13 13:10:23 crc kubenswrapper[4684]: I1013 13:10:23.322896 4684 generic.go:334] "Generic (PLEG): container finished" podID="607faaf0-b3f3-4ef3-978f-ad99d464f0bf" containerID="3d8ebf38442c8be668ca8016a0ce05fd3680e321a8e063f1567e7722eca768c9" exitCode=0 Oct 13 13:10:23 crc kubenswrapper[4684]: I1013 13:10:23.323241 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mk5s8" event={"ID":"607faaf0-b3f3-4ef3-978f-ad99d464f0bf","Type":"ContainerDied","Data":"3d8ebf38442c8be668ca8016a0ce05fd3680e321a8e063f1567e7722eca768c9"} Oct 13 13:10:23 crc kubenswrapper[4684]: I1013 13:10:23.327787 4684 generic.go:334] "Generic (PLEG): container finished" 
podID="a0f0adcd-9aa4-4c97-8a9c-72a654db14f8" containerID="759e13f1cff5cd7deaf8cbc060e0c141f756b4df77e930cbc79e52755d7a7d53" exitCode=0 Oct 13 13:10:23 crc kubenswrapper[4684]: I1013 13:10:23.329121 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-726x4" event={"ID":"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8","Type":"ContainerDied","Data":"759e13f1cff5cd7deaf8cbc060e0c141f756b4df77e930cbc79e52755d7a7d53"} Oct 13 13:10:23 crc kubenswrapper[4684]: I1013 13:10:23.333344 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcncl" event={"ID":"f63222d6-7d3d-4b06-a32c-517ad8e72492","Type":"ContainerDied","Data":"c7a65017baa717e50634d185fb7370d99ea29fa581ce004a2969f10cf990bc53"} Oct 13 13:10:23 crc kubenswrapper[4684]: I1013 13:10:23.333281 4684 generic.go:334] "Generic (PLEG): container finished" podID="f63222d6-7d3d-4b06-a32c-517ad8e72492" containerID="c7a65017baa717e50634d185fb7370d99ea29fa581ce004a2969f10cf990bc53" exitCode=0 Oct 13 13:10:23 crc kubenswrapper[4684]: I1013 13:10:23.336744 4684 generic.go:334] "Generic (PLEG): container finished" podID="c1e522f6-423a-45a7-bb5c-0c26610768d8" containerID="734e0ecc76392ca756fcda0d37f3d2f9cc51c9d20e9d26921cda8129aa444d7a" exitCode=0 Oct 13 13:10:23 crc kubenswrapper[4684]: I1013 13:10:23.336840 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2fb5s" event={"ID":"c1e522f6-423a-45a7-bb5c-0c26610768d8","Type":"ContainerDied","Data":"734e0ecc76392ca756fcda0d37f3d2f9cc51c9d20e9d26921cda8129aa444d7a"} Oct 13 13:10:23 crc kubenswrapper[4684]: I1013 13:10:23.345354 4684 generic.go:334] "Generic (PLEG): container finished" podID="4d0943d5-e534-4e02-bda6-bf77c5c39882" containerID="0038f4f89282e1899a9301f3c32a9cd8fa5169f97a35a1144b2a09fa6ef3b0e5" exitCode=0 Oct 13 13:10:23 crc kubenswrapper[4684]: I1013 13:10:23.345498 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8lm6p" event={"ID":"4d0943d5-e534-4e02-bda6-bf77c5c39882","Type":"ContainerDied","Data":"0038f4f89282e1899a9301f3c32a9cd8fa5169f97a35a1144b2a09fa6ef3b0e5"} Oct 13 13:10:23 crc kubenswrapper[4684]: I1013 13:10:23.350595 4684 generic.go:334] "Generic (PLEG): container finished" podID="65a9f2fe-16b9-4384-b620-227bbf5ed46c" containerID="9dad95f8c2082a4ec8ec265f43c825864c07450a9331241493901800044be619" exitCode=0 Oct 13 13:10:23 crc kubenswrapper[4684]: I1013 13:10:23.350728 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5qskm" event={"ID":"65a9f2fe-16b9-4384-b620-227bbf5ed46c","Type":"ContainerDied","Data":"9dad95f8c2082a4ec8ec265f43c825864c07450a9331241493901800044be619"} Oct 13 13:10:23 crc kubenswrapper[4684]: I1013 13:10:23.369865 4684 generic.go:334] "Generic (PLEG): container finished" podID="c6df7e75-1be9-4d4a-a6ac-af5d83e580d5" containerID="391a97f567b3bcf566bc2143a1aecb7305a9eee1fdf13ed826dc18d08418ddb1" exitCode=0 Oct 13 13:10:23 crc kubenswrapper[4684]: I1013 13:10:23.371744 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qx8pk" event={"ID":"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5","Type":"ContainerDied","Data":"391a97f567b3bcf566bc2143a1aecb7305a9eee1fdf13ed826dc18d08418ddb1"} Oct 13 13:10:24 crc kubenswrapper[4684]: I1013 13:10:24.382489 4684 generic.go:334] "Generic (PLEG): container finished" podID="84d0ce46-2872-4733-8d42-fd1e2680dff3" 
containerID="f446d6964b0ca17322c54d6c4cffa8bf52307805c2afecfa9096c2c18042ffbb" exitCode=0 Oct 13 13:10:24 crc kubenswrapper[4684]: I1013 13:10:24.382576 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr84p" event={"ID":"84d0ce46-2872-4733-8d42-fd1e2680dff3","Type":"ContainerDied","Data":"f446d6964b0ca17322c54d6c4cffa8bf52307805c2afecfa9096c2c18042ffbb"} Oct 13 13:10:26 crc kubenswrapper[4684]: I1013 13:10:26.402587 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5qskm" event={"ID":"65a9f2fe-16b9-4384-b620-227bbf5ed46c","Type":"ContainerStarted","Data":"db8d854738f636b17122ed25d21c3941ac27a476850982870295f59b7f2ff865"} Oct 13 13:10:26 crc kubenswrapper[4684]: I1013 13:10:26.407432 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qx8pk" event={"ID":"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5","Type":"ContainerStarted","Data":"423fb922a3634cf92d175f09cc7053dbe47638eb51280163542dd873f274cd88"} Oct 13 13:10:26 crc kubenswrapper[4684]: I1013 13:10:26.410050 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-726x4" event={"ID":"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8","Type":"ContainerStarted","Data":"0a0e9b5c6b294deb9b9db8f2d11a27ea6f8500f94f2feba7dbfb62259a7ceccd"} Oct 13 13:10:26 crc kubenswrapper[4684]: I1013 13:10:26.416591 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcncl" event={"ID":"f63222d6-7d3d-4b06-a32c-517ad8e72492","Type":"ContainerStarted","Data":"2e49960de03a20f7468e2235958cb7f9045b8569586474b46bff6ac841328816"} Oct 13 13:10:26 crc kubenswrapper[4684]: I1013 13:10:26.422372 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2fb5s" event={"ID":"c1e522f6-423a-45a7-bb5c-0c26610768d8","Type":"ContainerStarted","Data":"71af0e831056477baef076a30aa2a60e2426e1665ae5366598d340aa51ba27b3"} Oct 13 13:10:26 crc kubenswrapper[4684]: I1013 13:10:26.422542 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5qskm" podStartSLOduration=2.643960714 podStartE2EDuration="30.422531041s" podCreationTimestamp="2025-10-13 13:09:56 +0000 UTC" firstStartedPulling="2025-10-13 13:09:57.999143617 +0000 UTC m=+152.566527687" lastFinishedPulling="2025-10-13 13:10:25.777713944 +0000 UTC m=+180.345098014" observedRunningTime="2025-10-13 13:10:26.419422454 +0000 UTC m=+180.986806564" watchObservedRunningTime="2025-10-13 13:10:26.422531041 +0000 UTC m=+180.989915111" Oct 13 13:10:26 crc kubenswrapper[4684]: I1013 13:10:26.438559 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xcncl" podStartSLOduration=1.7747744600000002 podStartE2EDuration="28.438533379s" podCreationTimestamp="2025-10-13 13:09:58 +0000 UTC" firstStartedPulling="2025-10-13 13:09:59.038079073 +0000 UTC m=+153.605463143" lastFinishedPulling="2025-10-13 13:10:25.701837982 +0000 UTC m=+180.269222062" observedRunningTime="2025-10-13 13:10:26.434812713 +0000 UTC m=+181.002196963" watchObservedRunningTime="2025-10-13 13:10:26.438533379 +0000 UTC m=+181.005917469" Oct 13 13:10:26 crc kubenswrapper[4684]: I1013 13:10:26.464629 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qx8pk" podStartSLOduration=2.445080511 podStartE2EDuration="30.464602541s" 
podCreationTimestamp="2025-10-13 13:09:56 +0000 UTC" firstStartedPulling="2025-10-13 13:09:57.905110559 +0000 UTC m=+152.472494629" lastFinishedPulling="2025-10-13 13:10:25.924632579 +0000 UTC m=+180.492016659" observedRunningTime="2025-10-13 13:10:26.463057702 +0000 UTC m=+181.030441772" watchObservedRunningTime="2025-10-13 13:10:26.464602541 +0000 UTC m=+181.031986621" Oct 13 13:10:26 crc kubenswrapper[4684]: I1013 13:10:26.482007 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-726x4" podStartSLOduration=3.672700317 podStartE2EDuration="31.481989712s" podCreationTimestamp="2025-10-13 13:09:55 +0000 UTC" firstStartedPulling="2025-10-13 13:09:57.936338571 +0000 UTC m=+152.503722641" lastFinishedPulling="2025-10-13 13:10:25.745627966 +0000 UTC m=+180.313012036" observedRunningTime="2025-10-13 13:10:26.479920777 +0000 UTC m=+181.047304867" watchObservedRunningTime="2025-10-13 13:10:26.481989712 +0000 UTC m=+181.049373782" Oct 13 13:10:26 crc kubenswrapper[4684]: I1013 13:10:26.577498 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:10:26 crc kubenswrapper[4684]: I1013 13:10:26.577575 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:10:26 crc kubenswrapper[4684]: I1013 13:10:26.577609 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-726x4" Oct 13 13:10:26 crc kubenswrapper[4684]: I1013 13:10:26.577632 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-726x4" Oct 13 13:10:26 crc kubenswrapper[4684]: I1013 13:10:26.595135 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:10:26 crc kubenswrapper[4684]: I1013 13:10:26.595181 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:10:27 crc kubenswrapper[4684]: I1013 13:10:27.429749 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8lm6p" event={"ID":"4d0943d5-e534-4e02-bda6-bf77c5c39882","Type":"ContainerStarted","Data":"506c58ca795929154563d39dbe99dab3df4ac019a4f98c87029f39ee5f5056aa"} Oct 13 13:10:27 crc kubenswrapper[4684]: I1013 13:10:27.432370 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr84p" event={"ID":"84d0ce46-2872-4733-8d42-fd1e2680dff3","Type":"ContainerStarted","Data":"533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c"} Oct 13 13:10:27 crc kubenswrapper[4684]: I1013 13:10:27.434632 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mk5s8" event={"ID":"607faaf0-b3f3-4ef3-978f-ad99d464f0bf","Type":"ContainerStarted","Data":"bc9013d411b31f6577a7f4670aceeeed3c69842437c7ed66085e686664fe75fa"} Oct 13 13:10:27 crc kubenswrapper[4684]: I1013 13:10:27.455500 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8lm6p" podStartSLOduration=3.408620084 podStartE2EDuration="30.455474281s" podCreationTimestamp="2025-10-13 13:09:57 +0000 UTC" firstStartedPulling="2025-10-13 13:09:59.025933695 +0000 UTC m=+153.593317765" lastFinishedPulling="2025-10-13 13:10:26.072787892 
+0000 UTC m=+180.640171962" observedRunningTime="2025-10-13 13:10:27.452418246 +0000 UTC m=+182.019802326" watchObservedRunningTime="2025-10-13 13:10:27.455474281 +0000 UTC m=+182.022858351" Oct 13 13:10:27 crc kubenswrapper[4684]: I1013 13:10:27.478725 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mk5s8" podStartSLOduration=3.346280265 podStartE2EDuration="32.478707805s" podCreationTimestamp="2025-10-13 13:09:55 +0000 UTC" firstStartedPulling="2025-10-13 13:09:56.893427691 +0000 UTC m=+151.460811761" lastFinishedPulling="2025-10-13 13:10:26.025855231 +0000 UTC m=+180.593239301" observedRunningTime="2025-10-13 13:10:27.475227506 +0000 UTC m=+182.042611576" watchObservedRunningTime="2025-10-13 13:10:27.478707805 +0000 UTC m=+182.046091875" Oct 13 13:10:27 crc kubenswrapper[4684]: I1013 13:10:27.496638 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2fb5s" podStartSLOduration=3.754554105 podStartE2EDuration="28.496623502s" podCreationTimestamp="2025-10-13 13:09:59 +0000 UTC" firstStartedPulling="2025-10-13 13:10:01.144736284 +0000 UTC m=+155.712120354" lastFinishedPulling="2025-10-13 13:10:25.886805681 +0000 UTC m=+180.454189751" observedRunningTime="2025-10-13 13:10:27.496541569 +0000 UTC m=+182.063925649" watchObservedRunningTime="2025-10-13 13:10:27.496623502 +0000 UTC m=+182.064007562" Oct 13 13:10:27 crc kubenswrapper[4684]: I1013 13:10:27.514804 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zr84p" podStartSLOduration=4.756826686 podStartE2EDuration="29.514785388s" podCreationTimestamp="2025-10-13 13:09:58 +0000 UTC" firstStartedPulling="2025-10-13 13:10:01.186015729 +0000 UTC m=+155.753399799" lastFinishedPulling="2025-10-13 13:10:25.943974431 +0000 UTC m=+180.511358501" observedRunningTime="2025-10-13 13:10:27.513540469 +0000 UTC m=+182.080924549" watchObservedRunningTime="2025-10-13 13:10:27.514785388 +0000 UTC m=+182.082169458" Oct 13 13:10:28 crc kubenswrapper[4684]: I1013 13:10:28.028916 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:10:28 crc kubenswrapper[4684]: I1013 13:10:28.028975 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:10:28 crc kubenswrapper[4684]: I1013 13:10:28.047782 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-qx8pk" podUID="c6df7e75-1be9-4d4a-a6ac-af5d83e580d5" containerName="registry-server" probeResult="failure" output=< Oct 13 13:10:28 crc kubenswrapper[4684]: timeout: failed to connect service ":50051" within 1s Oct 13 13:10:28 crc kubenswrapper[4684]: > Oct 13 13:10:28 crc kubenswrapper[4684]: I1013 13:10:28.048690 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-726x4" podUID="a0f0adcd-9aa4-4c97-8a9c-72a654db14f8" containerName="registry-server" probeResult="failure" output=< Oct 13 13:10:28 crc kubenswrapper[4684]: timeout: failed to connect service ":50051" within 1s Oct 13 13:10:28 crc kubenswrapper[4684]: > Oct 13 13:10:28 crc kubenswrapper[4684]: I1013 13:10:28.049348 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-5qskm" podUID="65a9f2fe-16b9-4384-b620-227bbf5ed46c" containerName="registry-server" 
probeResult="failure" output=< Oct 13 13:10:28 crc kubenswrapper[4684]: timeout: failed to connect service ":50051" within 1s Oct 13 13:10:28 crc kubenswrapper[4684]: > Oct 13 13:10:28 crc kubenswrapper[4684]: I1013 13:10:28.077581 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:10:28 crc kubenswrapper[4684]: I1013 13:10:28.405824 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xcncl" Oct 13 13:10:28 crc kubenswrapper[4684]: I1013 13:10:28.405916 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xcncl" Oct 13 13:10:28 crc kubenswrapper[4684]: I1013 13:10:28.442876 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xcncl" Oct 13 13:10:29 crc kubenswrapper[4684]: I1013 13:10:29.187332 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:10:29 crc kubenswrapper[4684]: I1013 13:10:29.187380 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:10:29 crc kubenswrapper[4684]: I1013 13:10:29.384731 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-59crv" Oct 13 13:10:29 crc kubenswrapper[4684]: I1013 13:10:29.597917 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2fb5s" Oct 13 13:10:29 crc kubenswrapper[4684]: I1013 13:10:29.598641 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2fb5s" Oct 13 13:10:30 crc kubenswrapper[4684]: I1013 13:10:30.227920 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zr84p" podUID="84d0ce46-2872-4733-8d42-fd1e2680dff3" containerName="registry-server" probeResult="failure" output=< Oct 13 13:10:30 crc kubenswrapper[4684]: timeout: failed to connect service ":50051" within 1s Oct 13 13:10:30 crc kubenswrapper[4684]: > Oct 13 13:10:30 crc kubenswrapper[4684]: I1013 13:10:30.560150 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:10:30 crc kubenswrapper[4684]: I1013 13:10:30.560233 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:10:30 crc kubenswrapper[4684]: I1013 13:10:30.634554 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2fb5s" podUID="c1e522f6-423a-45a7-bb5c-0c26610768d8" containerName="registry-server" probeResult="failure" output=< Oct 13 13:10:30 crc kubenswrapper[4684]: timeout: failed to connect service ":50051" within 1s Oct 13 13:10:30 crc kubenswrapper[4684]: > Oct 13 13:10:34 crc kubenswrapper[4684]: I1013 13:10:34.216084 4684 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 13:10:36 crc kubenswrapper[4684]: I1013 13:10:36.221809 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:10:36 crc kubenswrapper[4684]: I1013 13:10:36.222213 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:10:36 crc kubenswrapper[4684]: I1013 13:10:36.269552 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:10:36 crc kubenswrapper[4684]: I1013 13:10:36.577762 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:10:36 crc kubenswrapper[4684]: I1013 13:10:36.633840 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:10:36 crc kubenswrapper[4684]: I1013 13:10:36.654428 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-726x4" Oct 13 13:10:36 crc kubenswrapper[4684]: I1013 13:10:36.660594 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:10:36 crc kubenswrapper[4684]: I1013 13:10:36.705977 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-726x4" Oct 13 13:10:36 crc kubenswrapper[4684]: I1013 13:10:36.708144 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:10:36 crc kubenswrapper[4684]: I1013 13:10:36.715532 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:10:38 crc kubenswrapper[4684]: I1013 13:10:38.073068 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:10:38 crc kubenswrapper[4684]: I1013 13:10:38.100628 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qx8pk"] Oct 13 13:10:38 crc kubenswrapper[4684]: I1013 13:10:38.457007 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xcncl" Oct 13 13:10:38 crc kubenswrapper[4684]: I1013 13:10:38.498352 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qx8pk" podUID="c6df7e75-1be9-4d4a-a6ac-af5d83e580d5" containerName="registry-server" containerID="cri-o://423fb922a3634cf92d175f09cc7053dbe47638eb51280163542dd873f274cd88" gracePeriod=2 Oct 13 13:10:38 crc kubenswrapper[4684]: I1013 13:10:38.864235 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:10:38 crc kubenswrapper[4684]: I1013 13:10:38.972849 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-catalog-content\") pod \"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5\" (UID: \"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5\") " Oct 13 13:10:38 crc kubenswrapper[4684]: I1013 13:10:38.973010 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-utilities\") pod \"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5\" (UID: \"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5\") " Oct 13 13:10:38 crc kubenswrapper[4684]: I1013 13:10:38.973177 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cfnt\" (UniqueName: \"kubernetes.io/projected/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-kube-api-access-5cfnt\") pod \"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5\" (UID: \"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5\") " Oct 13 13:10:38 crc kubenswrapper[4684]: I1013 13:10:38.974166 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-utilities" (OuterVolumeSpecName: "utilities") pod "c6df7e75-1be9-4d4a-a6ac-af5d83e580d5" (UID: "c6df7e75-1be9-4d4a-a6ac-af5d83e580d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:10:38 crc kubenswrapper[4684]: I1013 13:10:38.979570 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-kube-api-access-5cfnt" (OuterVolumeSpecName: "kube-api-access-5cfnt") pod "c6df7e75-1be9-4d4a-a6ac-af5d83e580d5" (UID: "c6df7e75-1be9-4d4a-a6ac-af5d83e580d5"). InnerVolumeSpecName "kube-api-access-5cfnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.024823 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c6df7e75-1be9-4d4a-a6ac-af5d83e580d5" (UID: "c6df7e75-1be9-4d4a-a6ac-af5d83e580d5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.074261 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cfnt\" (UniqueName: \"kubernetes.io/projected/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-kube-api-access-5cfnt\") on node \"crc\" DevicePath \"\"" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.074300 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.074313 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.106379 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5qskm"] Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.107422 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5qskm" podUID="65a9f2fe-16b9-4384-b620-227bbf5ed46c" containerName="registry-server" containerID="cri-o://db8d854738f636b17122ed25d21c3941ac27a476850982870295f59b7f2ff865" gracePeriod=2 Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.233036 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.284503 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.456119 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.479743 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65a9f2fe-16b9-4384-b620-227bbf5ed46c-catalog-content\") pod \"65a9f2fe-16b9-4384-b620-227bbf5ed46c\" (UID: \"65a9f2fe-16b9-4384-b620-227bbf5ed46c\") " Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.479804 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65a9f2fe-16b9-4384-b620-227bbf5ed46c-utilities\") pod \"65a9f2fe-16b9-4384-b620-227bbf5ed46c\" (UID: \"65a9f2fe-16b9-4384-b620-227bbf5ed46c\") " Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.479830 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rj7ct\" (UniqueName: \"kubernetes.io/projected/65a9f2fe-16b9-4384-b620-227bbf5ed46c-kube-api-access-rj7ct\") pod \"65a9f2fe-16b9-4384-b620-227bbf5ed46c\" (UID: \"65a9f2fe-16b9-4384-b620-227bbf5ed46c\") " Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.480574 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65a9f2fe-16b9-4384-b620-227bbf5ed46c-utilities" (OuterVolumeSpecName: "utilities") pod "65a9f2fe-16b9-4384-b620-227bbf5ed46c" (UID: "65a9f2fe-16b9-4384-b620-227bbf5ed46c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.488239 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65a9f2fe-16b9-4384-b620-227bbf5ed46c-kube-api-access-rj7ct" (OuterVolumeSpecName: "kube-api-access-rj7ct") pod "65a9f2fe-16b9-4384-b620-227bbf5ed46c" (UID: "65a9f2fe-16b9-4384-b620-227bbf5ed46c"). InnerVolumeSpecName "kube-api-access-rj7ct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.508668 4684 generic.go:334] "Generic (PLEG): container finished" podID="65a9f2fe-16b9-4384-b620-227bbf5ed46c" containerID="db8d854738f636b17122ed25d21c3941ac27a476850982870295f59b7f2ff865" exitCode=0 Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.508747 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5qskm" event={"ID":"65a9f2fe-16b9-4384-b620-227bbf5ed46c","Type":"ContainerDied","Data":"db8d854738f636b17122ed25d21c3941ac27a476850982870295f59b7f2ff865"} Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.508782 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5qskm" event={"ID":"65a9f2fe-16b9-4384-b620-227bbf5ed46c","Type":"ContainerDied","Data":"5a1185d18e8acea1c65cc9bccf6480852913481f7d3a3b73b31f4fbbbde09d8d"} Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.508803 4684 scope.go:117] "RemoveContainer" containerID="db8d854738f636b17122ed25d21c3941ac27a476850982870295f59b7f2ff865" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.508958 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5qskm" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.513281 4684 generic.go:334] "Generic (PLEG): container finished" podID="c6df7e75-1be9-4d4a-a6ac-af5d83e580d5" containerID="423fb922a3634cf92d175f09cc7053dbe47638eb51280163542dd873f274cd88" exitCode=0 Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.514066 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qx8pk" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.520067 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qx8pk" event={"ID":"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5","Type":"ContainerDied","Data":"423fb922a3634cf92d175f09cc7053dbe47638eb51280163542dd873f274cd88"} Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.520141 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qx8pk" event={"ID":"c6df7e75-1be9-4d4a-a6ac-af5d83e580d5","Type":"ContainerDied","Data":"77afe3e88c4a478d1179e34dd10f460d5090458edf03cb3170125553918db01d"} Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.527288 4684 scope.go:117] "RemoveContainer" containerID="9dad95f8c2082a4ec8ec265f43c825864c07450a9331241493901800044be619" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.549695 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qx8pk"] Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.550726 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65a9f2fe-16b9-4384-b620-227bbf5ed46c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "65a9f2fe-16b9-4384-b620-227bbf5ed46c" (UID: "65a9f2fe-16b9-4384-b620-227bbf5ed46c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.550971 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qx8pk"] Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.551127 4684 scope.go:117] "RemoveContainer" containerID="df43878c38d23e50da55eea172eab25cb6c8f6394a1ae1edb79d0da302ffa6c7" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.566518 4684 scope.go:117] "RemoveContainer" containerID="db8d854738f636b17122ed25d21c3941ac27a476850982870295f59b7f2ff865" Oct 13 13:10:39 crc kubenswrapper[4684]: E1013 13:10:39.567129 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db8d854738f636b17122ed25d21c3941ac27a476850982870295f59b7f2ff865\": container with ID starting with db8d854738f636b17122ed25d21c3941ac27a476850982870295f59b7f2ff865 not found: ID does not exist" containerID="db8d854738f636b17122ed25d21c3941ac27a476850982870295f59b7f2ff865" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.567206 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db8d854738f636b17122ed25d21c3941ac27a476850982870295f59b7f2ff865"} err="failed to get container status \"db8d854738f636b17122ed25d21c3941ac27a476850982870295f59b7f2ff865\": rpc error: code = NotFound desc = could not find container \"db8d854738f636b17122ed25d21c3941ac27a476850982870295f59b7f2ff865\": container with ID starting with db8d854738f636b17122ed25d21c3941ac27a476850982870295f59b7f2ff865 not found: ID does not exist" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.567284 4684 scope.go:117] "RemoveContainer" containerID="9dad95f8c2082a4ec8ec265f43c825864c07450a9331241493901800044be619" Oct 13 13:10:39 crc kubenswrapper[4684]: E1013 13:10:39.567749 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dad95f8c2082a4ec8ec265f43c825864c07450a9331241493901800044be619\": container 
with ID starting with 9dad95f8c2082a4ec8ec265f43c825864c07450a9331241493901800044be619 not found: ID does not exist" containerID="9dad95f8c2082a4ec8ec265f43c825864c07450a9331241493901800044be619" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.567807 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dad95f8c2082a4ec8ec265f43c825864c07450a9331241493901800044be619"} err="failed to get container status \"9dad95f8c2082a4ec8ec265f43c825864c07450a9331241493901800044be619\": rpc error: code = NotFound desc = could not find container \"9dad95f8c2082a4ec8ec265f43c825864c07450a9331241493901800044be619\": container with ID starting with 9dad95f8c2082a4ec8ec265f43c825864c07450a9331241493901800044be619 not found: ID does not exist" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.567846 4684 scope.go:117] "RemoveContainer" containerID="df43878c38d23e50da55eea172eab25cb6c8f6394a1ae1edb79d0da302ffa6c7" Oct 13 13:10:39 crc kubenswrapper[4684]: E1013 13:10:39.568211 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df43878c38d23e50da55eea172eab25cb6c8f6394a1ae1edb79d0da302ffa6c7\": container with ID starting with df43878c38d23e50da55eea172eab25cb6c8f6394a1ae1edb79d0da302ffa6c7 not found: ID does not exist" containerID="df43878c38d23e50da55eea172eab25cb6c8f6394a1ae1edb79d0da302ffa6c7" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.568238 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df43878c38d23e50da55eea172eab25cb6c8f6394a1ae1edb79d0da302ffa6c7"} err="failed to get container status \"df43878c38d23e50da55eea172eab25cb6c8f6394a1ae1edb79d0da302ffa6c7\": rpc error: code = NotFound desc = could not find container \"df43878c38d23e50da55eea172eab25cb6c8f6394a1ae1edb79d0da302ffa6c7\": container with ID starting with df43878c38d23e50da55eea172eab25cb6c8f6394a1ae1edb79d0da302ffa6c7 not found: ID does not exist" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.568256 4684 scope.go:117] "RemoveContainer" containerID="423fb922a3634cf92d175f09cc7053dbe47638eb51280163542dd873f274cd88" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.581068 4684 scope.go:117] "RemoveContainer" containerID="391a97f567b3bcf566bc2143a1aecb7305a9eee1fdf13ed826dc18d08418ddb1" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.581496 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65a9f2fe-16b9-4384-b620-227bbf5ed46c-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.581539 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rj7ct\" (UniqueName: \"kubernetes.io/projected/65a9f2fe-16b9-4384-b620-227bbf5ed46c-kube-api-access-rj7ct\") on node \"crc\" DevicePath \"\"" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.581655 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65a9f2fe-16b9-4384-b620-227bbf5ed46c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.596522 4684 scope.go:117] "RemoveContainer" containerID="dfa26532f2a2ab76887234c50f6b1919fa268945dfc382708cecc938ff45c5a9" Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.620272 4684 scope.go:117] "RemoveContainer" containerID="423fb922a3634cf92d175f09cc7053dbe47638eb51280163542dd873f274cd88" 
Oct 13 13:10:39 crc kubenswrapper[4684]: E1013 13:10:39.621042 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"423fb922a3634cf92d175f09cc7053dbe47638eb51280163542dd873f274cd88\": container with ID starting with 423fb922a3634cf92d175f09cc7053dbe47638eb51280163542dd873f274cd88 not found: ID does not exist" containerID="423fb922a3634cf92d175f09cc7053dbe47638eb51280163542dd873f274cd88"
Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.621078 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"423fb922a3634cf92d175f09cc7053dbe47638eb51280163542dd873f274cd88"} err="failed to get container status \"423fb922a3634cf92d175f09cc7053dbe47638eb51280163542dd873f274cd88\": rpc error: code = NotFound desc = could not find container \"423fb922a3634cf92d175f09cc7053dbe47638eb51280163542dd873f274cd88\": container with ID starting with 423fb922a3634cf92d175f09cc7053dbe47638eb51280163542dd873f274cd88 not found: ID does not exist"
Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.621106 4684 scope.go:117] "RemoveContainer" containerID="391a97f567b3bcf566bc2143a1aecb7305a9eee1fdf13ed826dc18d08418ddb1"
Oct 13 13:10:39 crc kubenswrapper[4684]: E1013 13:10:39.621529 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"391a97f567b3bcf566bc2143a1aecb7305a9eee1fdf13ed826dc18d08418ddb1\": container with ID starting with 391a97f567b3bcf566bc2143a1aecb7305a9eee1fdf13ed826dc18d08418ddb1 not found: ID does not exist" containerID="391a97f567b3bcf566bc2143a1aecb7305a9eee1fdf13ed826dc18d08418ddb1"
Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.621549 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"391a97f567b3bcf566bc2143a1aecb7305a9eee1fdf13ed826dc18d08418ddb1"} err="failed to get container status \"391a97f567b3bcf566bc2143a1aecb7305a9eee1fdf13ed826dc18d08418ddb1\": rpc error: code = NotFound desc = could not find container \"391a97f567b3bcf566bc2143a1aecb7305a9eee1fdf13ed826dc18d08418ddb1\": container with ID starting with 391a97f567b3bcf566bc2143a1aecb7305a9eee1fdf13ed826dc18d08418ddb1 not found: ID does not exist"
Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.621563 4684 scope.go:117] "RemoveContainer" containerID="dfa26532f2a2ab76887234c50f6b1919fa268945dfc382708cecc938ff45c5a9"
Oct 13 13:10:39 crc kubenswrapper[4684]: E1013 13:10:39.622942 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfa26532f2a2ab76887234c50f6b1919fa268945dfc382708cecc938ff45c5a9\": container with ID starting with dfa26532f2a2ab76887234c50f6b1919fa268945dfc382708cecc938ff45c5a9 not found: ID does not exist" containerID="dfa26532f2a2ab76887234c50f6b1919fa268945dfc382708cecc938ff45c5a9"
Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.622966 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfa26532f2a2ab76887234c50f6b1919fa268945dfc382708cecc938ff45c5a9"} err="failed to get container status \"dfa26532f2a2ab76887234c50f6b1919fa268945dfc382708cecc938ff45c5a9\": rpc error: code = NotFound desc = could not find container \"dfa26532f2a2ab76887234c50f6b1919fa268945dfc382708cecc938ff45c5a9\": container with ID starting with dfa26532f2a2ab76887234c50f6b1919fa268945dfc382708cecc938ff45c5a9 not found: ID does not exist"
Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.642325 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2fb5s"
Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.696348 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2fb5s"
Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.831716 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5qskm"]
Oct 13 13:10:39 crc kubenswrapper[4684]: I1013 13:10:39.837206 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5qskm"]
Oct 13 13:10:40 crc kubenswrapper[4684]: I1013 13:10:40.363697 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65a9f2fe-16b9-4384-b620-227bbf5ed46c" path="/var/lib/kubelet/pods/65a9f2fe-16b9-4384-b620-227bbf5ed46c/volumes"
Oct 13 13:10:40 crc kubenswrapper[4684]: I1013 13:10:40.364978 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6df7e75-1be9-4d4a-a6ac-af5d83e580d5" path="/var/lib/kubelet/pods/c6df7e75-1be9-4d4a-a6ac-af5d83e580d5/volumes"
Oct 13 13:10:40 crc kubenswrapper[4684]: I1013 13:10:40.504612 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcncl"]
Oct 13 13:10:40 crc kubenswrapper[4684]: I1013 13:10:40.504917 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xcncl" podUID="f63222d6-7d3d-4b06-a32c-517ad8e72492" containerName="registry-server" containerID="cri-o://2e49960de03a20f7468e2235958cb7f9045b8569586474b46bff6ac841328816" gracePeriod=2
Oct 13 13:10:40 crc kubenswrapper[4684]: I1013 13:10:40.819283 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xcncl"
Oct 13 13:10:40 crc kubenswrapper[4684]: I1013 13:10:40.899223 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9vfw\" (UniqueName: \"kubernetes.io/projected/f63222d6-7d3d-4b06-a32c-517ad8e72492-kube-api-access-z9vfw\") pod \"f63222d6-7d3d-4b06-a32c-517ad8e72492\" (UID: \"f63222d6-7d3d-4b06-a32c-517ad8e72492\") "
Oct 13 13:10:40 crc kubenswrapper[4684]: I1013 13:10:40.899381 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f63222d6-7d3d-4b06-a32c-517ad8e72492-utilities\") pod \"f63222d6-7d3d-4b06-a32c-517ad8e72492\" (UID: \"f63222d6-7d3d-4b06-a32c-517ad8e72492\") "
Oct 13 13:10:40 crc kubenswrapper[4684]: I1013 13:10:40.899412 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f63222d6-7d3d-4b06-a32c-517ad8e72492-catalog-content\") pod \"f63222d6-7d3d-4b06-a32c-517ad8e72492\" (UID: \"f63222d6-7d3d-4b06-a32c-517ad8e72492\") "
Oct 13 13:10:40 crc kubenswrapper[4684]: I1013 13:10:40.900236 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f63222d6-7d3d-4b06-a32c-517ad8e72492-utilities" (OuterVolumeSpecName: "utilities") pod "f63222d6-7d3d-4b06-a32c-517ad8e72492" (UID: "f63222d6-7d3d-4b06-a32c-517ad8e72492"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:10:40 crc kubenswrapper[4684]: I1013 13:10:40.905855 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f63222d6-7d3d-4b06-a32c-517ad8e72492-kube-api-access-z9vfw" (OuterVolumeSpecName: "kube-api-access-z9vfw") pod "f63222d6-7d3d-4b06-a32c-517ad8e72492" (UID: "f63222d6-7d3d-4b06-a32c-517ad8e72492"). InnerVolumeSpecName "kube-api-access-z9vfw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:10:40 crc kubenswrapper[4684]: I1013 13:10:40.914217 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f63222d6-7d3d-4b06-a32c-517ad8e72492-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f63222d6-7d3d-4b06-a32c-517ad8e72492" (UID: "f63222d6-7d3d-4b06-a32c-517ad8e72492"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.001067 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f63222d6-7d3d-4b06-a32c-517ad8e72492-utilities\") on node \"crc\" DevicePath \"\""
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.001102 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f63222d6-7d3d-4b06-a32c-517ad8e72492-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.001116 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9vfw\" (UniqueName: \"kubernetes.io/projected/f63222d6-7d3d-4b06-a32c-517ad8e72492-kube-api-access-z9vfw\") on node \"crc\" DevicePath \"\""
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.261611 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-kxhdp"]
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.529677 4684 generic.go:334] "Generic (PLEG): container finished" podID="f63222d6-7d3d-4b06-a32c-517ad8e72492" containerID="2e49960de03a20f7468e2235958cb7f9045b8569586474b46bff6ac841328816" exitCode=0
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.529731 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcncl" event={"ID":"f63222d6-7d3d-4b06-a32c-517ad8e72492","Type":"ContainerDied","Data":"2e49960de03a20f7468e2235958cb7f9045b8569586474b46bff6ac841328816"}
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.529780 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xcncl" event={"ID":"f63222d6-7d3d-4b06-a32c-517ad8e72492","Type":"ContainerDied","Data":"d64abc6eda33b1d66d9a760121eba2d7ebc5bdb0d93008d6cc09f659441b0a95"}
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.529803 4684 scope.go:117] "RemoveContainer" containerID="2e49960de03a20f7468e2235958cb7f9045b8569586474b46bff6ac841328816"
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.529807 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xcncl"
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.558426 4684 scope.go:117] "RemoveContainer" containerID="c7a65017baa717e50634d185fb7370d99ea29fa581ce004a2969f10cf990bc53"
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.564993 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcncl"]
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.569516 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xcncl"]
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.591482 4684 scope.go:117] "RemoveContainer" containerID="2b43c87648309e0dc39f8fa68955dc2811b456133141a368086baae600efd018"
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.608313 4684 scope.go:117] "RemoveContainer" containerID="2e49960de03a20f7468e2235958cb7f9045b8569586474b46bff6ac841328816"
Oct 13 13:10:41 crc kubenswrapper[4684]: E1013 13:10:41.608873 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e49960de03a20f7468e2235958cb7f9045b8569586474b46bff6ac841328816\": container with ID starting with 2e49960de03a20f7468e2235958cb7f9045b8569586474b46bff6ac841328816 not found: ID does not exist" containerID="2e49960de03a20f7468e2235958cb7f9045b8569586474b46bff6ac841328816"
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.608972 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e49960de03a20f7468e2235958cb7f9045b8569586474b46bff6ac841328816"} err="failed to get container status \"2e49960de03a20f7468e2235958cb7f9045b8569586474b46bff6ac841328816\": rpc error: code = NotFound desc = could not find container \"2e49960de03a20f7468e2235958cb7f9045b8569586474b46bff6ac841328816\": container with ID starting with 2e49960de03a20f7468e2235958cb7f9045b8569586474b46bff6ac841328816 not found: ID does not exist"
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.609000 4684 scope.go:117] "RemoveContainer" containerID="c7a65017baa717e50634d185fb7370d99ea29fa581ce004a2969f10cf990bc53"
Oct 13 13:10:41 crc kubenswrapper[4684]: E1013 13:10:41.609307 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7a65017baa717e50634d185fb7370d99ea29fa581ce004a2969f10cf990bc53\": container with ID starting with c7a65017baa717e50634d185fb7370d99ea29fa581ce004a2969f10cf990bc53 not found: ID does not exist" containerID="c7a65017baa717e50634d185fb7370d99ea29fa581ce004a2969f10cf990bc53"
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.609334 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7a65017baa717e50634d185fb7370d99ea29fa581ce004a2969f10cf990bc53"} err="failed to get container status \"c7a65017baa717e50634d185fb7370d99ea29fa581ce004a2969f10cf990bc53\": rpc error: code = NotFound desc = could not find container \"c7a65017baa717e50634d185fb7370d99ea29fa581ce004a2969f10cf990bc53\": container with ID starting with c7a65017baa717e50634d185fb7370d99ea29fa581ce004a2969f10cf990bc53 not found: ID does not exist"
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.609351 4684 scope.go:117] "RemoveContainer" containerID="2b43c87648309e0dc39f8fa68955dc2811b456133141a368086baae600efd018"
Oct 13 13:10:41 crc kubenswrapper[4684]: E1013 13:10:41.609663 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b43c87648309e0dc39f8fa68955dc2811b456133141a368086baae600efd018\": container with ID starting with 2b43c87648309e0dc39f8fa68955dc2811b456133141a368086baae600efd018 not found: ID does not exist" containerID="2b43c87648309e0dc39f8fa68955dc2811b456133141a368086baae600efd018"
Oct 13 13:10:41 crc kubenswrapper[4684]: I1013 13:10:41.609708 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b43c87648309e0dc39f8fa68955dc2811b456133141a368086baae600efd018"} err="failed to get container status \"2b43c87648309e0dc39f8fa68955dc2811b456133141a368086baae600efd018\": rpc error: code = NotFound desc = could not find container \"2b43c87648309e0dc39f8fa68955dc2811b456133141a368086baae600efd018\": container with ID starting with 2b43c87648309e0dc39f8fa68955dc2811b456133141a368086baae600efd018 not found: ID does not exist"
Oct 13 13:10:42 crc kubenswrapper[4684]: I1013 13:10:42.358541 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f63222d6-7d3d-4b06-a32c-517ad8e72492" path="/var/lib/kubelet/pods/f63222d6-7d3d-4b06-a32c-517ad8e72492/volumes"
Oct 13 13:10:43 crc kubenswrapper[4684]: I1013 13:10:43.502978 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2fb5s"]
Oct 13 13:10:43 crc kubenswrapper[4684]: I1013 13:10:43.503512 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2fb5s" podUID="c1e522f6-423a-45a7-bb5c-0c26610768d8" containerName="registry-server" containerID="cri-o://71af0e831056477baef076a30aa2a60e2426e1665ae5366598d340aa51ba27b3" gracePeriod=2
Oct 13 13:10:43 crc kubenswrapper[4684]: I1013 13:10:43.803846 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2fb5s"
Oct 13 13:10:43 crc kubenswrapper[4684]: I1013 13:10:43.834241 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pccrd\" (UniqueName: \"kubernetes.io/projected/c1e522f6-423a-45a7-bb5c-0c26610768d8-kube-api-access-pccrd\") pod \"c1e522f6-423a-45a7-bb5c-0c26610768d8\" (UID: \"c1e522f6-423a-45a7-bb5c-0c26610768d8\") "
Oct 13 13:10:43 crc kubenswrapper[4684]: I1013 13:10:43.834307 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1e522f6-423a-45a7-bb5c-0c26610768d8-utilities\") pod \"c1e522f6-423a-45a7-bb5c-0c26610768d8\" (UID: \"c1e522f6-423a-45a7-bb5c-0c26610768d8\") "
Oct 13 13:10:43 crc kubenswrapper[4684]: I1013 13:10:43.834408 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1e522f6-423a-45a7-bb5c-0c26610768d8-catalog-content\") pod \"c1e522f6-423a-45a7-bb5c-0c26610768d8\" (UID: \"c1e522f6-423a-45a7-bb5c-0c26610768d8\") "
Oct 13 13:10:43 crc kubenswrapper[4684]: I1013 13:10:43.835377 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1e522f6-423a-45a7-bb5c-0c26610768d8-utilities" (OuterVolumeSpecName: "utilities") pod "c1e522f6-423a-45a7-bb5c-0c26610768d8" (UID: "c1e522f6-423a-45a7-bb5c-0c26610768d8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:10:43 crc kubenswrapper[4684]: I1013 13:10:43.842204 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1e522f6-423a-45a7-bb5c-0c26610768d8-kube-api-access-pccrd" (OuterVolumeSpecName: "kube-api-access-pccrd") pod "c1e522f6-423a-45a7-bb5c-0c26610768d8" (UID: "c1e522f6-423a-45a7-bb5c-0c26610768d8"). InnerVolumeSpecName "kube-api-access-pccrd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:10:43 crc kubenswrapper[4684]: I1013 13:10:43.912924 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1e522f6-423a-45a7-bb5c-0c26610768d8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c1e522f6-423a-45a7-bb5c-0c26610768d8" (UID: "c1e522f6-423a-45a7-bb5c-0c26610768d8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:10:43 crc kubenswrapper[4684]: I1013 13:10:43.935852 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pccrd\" (UniqueName: \"kubernetes.io/projected/c1e522f6-423a-45a7-bb5c-0c26610768d8-kube-api-access-pccrd\") on node \"crc\" DevicePath \"\""
Oct 13 13:10:43 crc kubenswrapper[4684]: I1013 13:10:43.935878 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1e522f6-423a-45a7-bb5c-0c26610768d8-utilities\") on node \"crc\" DevicePath \"\""
Oct 13 13:10:43 crc kubenswrapper[4684]: I1013 13:10:43.935887 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1e522f6-423a-45a7-bb5c-0c26610768d8-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 13 13:10:44 crc kubenswrapper[4684]: I1013 13:10:44.549805 4684 generic.go:334] "Generic (PLEG): container finished" podID="c1e522f6-423a-45a7-bb5c-0c26610768d8" containerID="71af0e831056477baef076a30aa2a60e2426e1665ae5366598d340aa51ba27b3" exitCode=0
Oct 13 13:10:44 crc kubenswrapper[4684]: I1013 13:10:44.549867 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2fb5s" event={"ID":"c1e522f6-423a-45a7-bb5c-0c26610768d8","Type":"ContainerDied","Data":"71af0e831056477baef076a30aa2a60e2426e1665ae5366598d340aa51ba27b3"}
Oct 13 13:10:44 crc kubenswrapper[4684]: I1013 13:10:44.549926 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2fb5s"
Oct 13 13:10:44 crc kubenswrapper[4684]: I1013 13:10:44.550184 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2fb5s" event={"ID":"c1e522f6-423a-45a7-bb5c-0c26610768d8","Type":"ContainerDied","Data":"a257a5659acd6ac17775441560d5ee4e94c4b239ce29df935e97ec70846b4912"}
Oct 13 13:10:44 crc kubenswrapper[4684]: I1013 13:10:44.550210 4684 scope.go:117] "RemoveContainer" containerID="71af0e831056477baef076a30aa2a60e2426e1665ae5366598d340aa51ba27b3"
Oct 13 13:10:44 crc kubenswrapper[4684]: I1013 13:10:44.571131 4684 scope.go:117] "RemoveContainer" containerID="734e0ecc76392ca756fcda0d37f3d2f9cc51c9d20e9d26921cda8129aa444d7a"
Oct 13 13:10:44 crc kubenswrapper[4684]: I1013 13:10:44.573580 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2fb5s"]
Oct 13 13:10:44 crc kubenswrapper[4684]: I1013 13:10:44.577343 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2fb5s"]
Oct 13 13:10:44 crc kubenswrapper[4684]: I1013 13:10:44.586421 4684 scope.go:117] "RemoveContainer" containerID="387e1d4e6c1ada447414368e70c1ce8a5b9ca0623a079f8bd68b645c6f5f59dd"
Oct 13 13:10:44 crc kubenswrapper[4684]: I1013 13:10:44.603865 4684 scope.go:117] "RemoveContainer" containerID="71af0e831056477baef076a30aa2a60e2426e1665ae5366598d340aa51ba27b3"
Oct 13 13:10:44 crc kubenswrapper[4684]: E1013 13:10:44.604456 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71af0e831056477baef076a30aa2a60e2426e1665ae5366598d340aa51ba27b3\": container with ID starting with 71af0e831056477baef076a30aa2a60e2426e1665ae5366598d340aa51ba27b3 not found: ID does not exist" containerID="71af0e831056477baef076a30aa2a60e2426e1665ae5366598d340aa51ba27b3"
Oct 13 13:10:44 crc kubenswrapper[4684]: I1013 13:10:44.604541 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71af0e831056477baef076a30aa2a60e2426e1665ae5366598d340aa51ba27b3"} err="failed to get container status \"71af0e831056477baef076a30aa2a60e2426e1665ae5366598d340aa51ba27b3\": rpc error: code = NotFound desc = could not find container \"71af0e831056477baef076a30aa2a60e2426e1665ae5366598d340aa51ba27b3\": container with ID starting with 71af0e831056477baef076a30aa2a60e2426e1665ae5366598d340aa51ba27b3 not found: ID does not exist"
Oct 13 13:10:44 crc kubenswrapper[4684]: I1013 13:10:44.604571 4684 scope.go:117] "RemoveContainer" containerID="734e0ecc76392ca756fcda0d37f3d2f9cc51c9d20e9d26921cda8129aa444d7a"
Oct 13 13:10:44 crc kubenswrapper[4684]: E1013 13:10:44.604886 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"734e0ecc76392ca756fcda0d37f3d2f9cc51c9d20e9d26921cda8129aa444d7a\": container with ID starting with 734e0ecc76392ca756fcda0d37f3d2f9cc51c9d20e9d26921cda8129aa444d7a not found: ID does not exist" containerID="734e0ecc76392ca756fcda0d37f3d2f9cc51c9d20e9d26921cda8129aa444d7a"
Oct 13 13:10:44 crc kubenswrapper[4684]: I1013 13:10:44.604934 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"734e0ecc76392ca756fcda0d37f3d2f9cc51c9d20e9d26921cda8129aa444d7a"} err="failed to get container status \"734e0ecc76392ca756fcda0d37f3d2f9cc51c9d20e9d26921cda8129aa444d7a\": rpc error: code = NotFound desc = could not find container \"734e0ecc76392ca756fcda0d37f3d2f9cc51c9d20e9d26921cda8129aa444d7a\": container with ID starting with 734e0ecc76392ca756fcda0d37f3d2f9cc51c9d20e9d26921cda8129aa444d7a not found: ID does not exist"
Oct 13 13:10:44 crc kubenswrapper[4684]: I1013 13:10:44.604954 4684 scope.go:117] "RemoveContainer" containerID="387e1d4e6c1ada447414368e70c1ce8a5b9ca0623a079f8bd68b645c6f5f59dd"
Oct 13 13:10:44 crc kubenswrapper[4684]: E1013 13:10:44.605210 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"387e1d4e6c1ada447414368e70c1ce8a5b9ca0623a079f8bd68b645c6f5f59dd\": container with ID starting with 387e1d4e6c1ada447414368e70c1ce8a5b9ca0623a079f8bd68b645c6f5f59dd not found: ID does not exist" containerID="387e1d4e6c1ada447414368e70c1ce8a5b9ca0623a079f8bd68b645c6f5f59dd"
Oct 13 13:10:44 crc kubenswrapper[4684]: I1013 13:10:44.605261 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"387e1d4e6c1ada447414368e70c1ce8a5b9ca0623a079f8bd68b645c6f5f59dd"} err="failed to get container status \"387e1d4e6c1ada447414368e70c1ce8a5b9ca0623a079f8bd68b645c6f5f59dd\": rpc error: code = NotFound desc = could not find container \"387e1d4e6c1ada447414368e70c1ce8a5b9ca0623a079f8bd68b645c6f5f59dd\": container with ID starting with 387e1d4e6c1ada447414368e70c1ce8a5b9ca0623a079f8bd68b645c6f5f59dd not found: ID does not exist"
Oct 13 13:10:46 crc kubenswrapper[4684]: I1013 13:10:46.358421 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1e522f6-423a-45a7-bb5c-0c26610768d8" path="/var/lib/kubelet/pods/c1e522f6-423a-45a7-bb5c-0c26610768d8/volumes"
Oct 13 13:11:00 crc kubenswrapper[4684]: I1013 13:11:00.560296 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 13:11:00 crc kubenswrapper[4684]: I1013 13:11:00.560886 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 13:11:00 crc kubenswrapper[4684]: I1013 13:11:00.561007 4684 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wns5s"
Oct 13 13:11:00 crc kubenswrapper[4684]: I1013 13:11:00.561795 4684 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5"} pod="openshift-machine-config-operator/machine-config-daemon-wns5s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 13 13:11:00 crc kubenswrapper[4684]: I1013 13:11:00.561926 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" containerID="cri-o://c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5" gracePeriod=600
Oct 13 13:11:01 crc kubenswrapper[4684]: I1013 13:11:01.646212 4684 generic.go:334] "Generic (PLEG): container finished" podID="e54ad64a-6df7-4082-afde-d56463121b3f" containerID="c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5" exitCode=0
Oct 13 13:11:01 crc kubenswrapper[4684]: I1013 13:11:01.646289 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerDied","Data":"c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5"}
Oct 13 13:11:01 crc kubenswrapper[4684]: I1013 13:11:01.646741 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"b6dd768883ba78cafb92197d19e9353d53ffabce7a93163ce51b64d353a36851"}
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.295761 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" podUID="ba1678a8-b5a0-491d-9531-a18c9500d4a3" containerName="oauth-openshift" containerID="cri-o://7531ce962a16ecf766710ee9f292f30bc1538a14192bef97902ff588258a21b5" gracePeriod=15
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.632992 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667465 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6f96647944-mtqfl"]
Oct 13 13:11:06 crc kubenswrapper[4684]: E1013 13:11:06.667667 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1e522f6-423a-45a7-bb5c-0c26610768d8" containerName="extract-utilities"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667678 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1e522f6-423a-45a7-bb5c-0c26610768d8" containerName="extract-utilities"
Oct 13 13:11:06 crc kubenswrapper[4684]: E1013 13:11:06.667687 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd04eb90-3b87-4a7d-8ba8-290075bace80" containerName="pruner"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667694 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd04eb90-3b87-4a7d-8ba8-290075bace80" containerName="pruner"
Oct 13 13:11:06 crc kubenswrapper[4684]: E1013 13:11:06.667700 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba1678a8-b5a0-491d-9531-a18c9500d4a3" containerName="oauth-openshift"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667706 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba1678a8-b5a0-491d-9531-a18c9500d4a3" containerName="oauth-openshift"
Oct 13 13:11:06 crc kubenswrapper[4684]: E1013 13:11:06.667715 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6df7e75-1be9-4d4a-a6ac-af5d83e580d5" containerName="registry-server"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667720 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6df7e75-1be9-4d4a-a6ac-af5d83e580d5" containerName="registry-server"
Oct 13 13:11:06 crc kubenswrapper[4684]: E1013 13:11:06.667727 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1e522f6-423a-45a7-bb5c-0c26610768d8" containerName="extract-content"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667733 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1e522f6-423a-45a7-bb5c-0c26610768d8" containerName="extract-content"
Oct 13 13:11:06 crc kubenswrapper[4684]: E1013 13:11:06.667744 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65a9f2fe-16b9-4384-b620-227bbf5ed46c" containerName="extract-utilities"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667749 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="65a9f2fe-16b9-4384-b620-227bbf5ed46c" containerName="extract-utilities"
Oct 13 13:11:06 crc kubenswrapper[4684]: E1013 13:11:06.667758 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65a9f2fe-16b9-4384-b620-227bbf5ed46c" containerName="extract-content"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667763 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="65a9f2fe-16b9-4384-b620-227bbf5ed46c" containerName="extract-content"
Oct 13 13:11:06 crc kubenswrapper[4684]: E1013 13:11:06.667770 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f63222d6-7d3d-4b06-a32c-517ad8e72492" containerName="registry-server"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667776 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="f63222d6-7d3d-4b06-a32c-517ad8e72492" containerName="registry-server"
Oct 13 13:11:06 crc kubenswrapper[4684]: E1013 13:11:06.667785 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f63222d6-7d3d-4b06-a32c-517ad8e72492" containerName="extract-content"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667790 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="f63222d6-7d3d-4b06-a32c-517ad8e72492" containerName="extract-content"
Oct 13 13:11:06 crc kubenswrapper[4684]: E1013 13:11:06.667798 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6df7e75-1be9-4d4a-a6ac-af5d83e580d5" containerName="extract-content"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667805 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6df7e75-1be9-4d4a-a6ac-af5d83e580d5" containerName="extract-content"
Oct 13 13:11:06 crc kubenswrapper[4684]: E1013 13:11:06.667816 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e0c11f7-9c98-4c41-8246-6cdd68a23172" containerName="pruner"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667822 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e0c11f7-9c98-4c41-8246-6cdd68a23172" containerName="pruner"
Oct 13 13:11:06 crc kubenswrapper[4684]: E1013 13:11:06.667833 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65a9f2fe-16b9-4384-b620-227bbf5ed46c" containerName="registry-server"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667839 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="65a9f2fe-16b9-4384-b620-227bbf5ed46c" containerName="registry-server"
Oct 13 13:11:06 crc kubenswrapper[4684]: E1013 13:11:06.667847 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6df7e75-1be9-4d4a-a6ac-af5d83e580d5" containerName="extract-utilities"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667854 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6df7e75-1be9-4d4a-a6ac-af5d83e580d5" containerName="extract-utilities"
Oct 13 13:11:06 crc kubenswrapper[4684]: E1013 13:11:06.667861 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f63222d6-7d3d-4b06-a32c-517ad8e72492" containerName="extract-utilities"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667867 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="f63222d6-7d3d-4b06-a32c-517ad8e72492" containerName="extract-utilities"
Oct 13 13:11:06 crc kubenswrapper[4684]: E1013 13:11:06.667876 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1e522f6-423a-45a7-bb5c-0c26610768d8" containerName="registry-server"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667881 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1e522f6-423a-45a7-bb5c-0c26610768d8" containerName="registry-server"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667976 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e0c11f7-9c98-4c41-8246-6cdd68a23172" containerName="pruner"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667986 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd04eb90-3b87-4a7d-8ba8-290075bace80" containerName="pruner"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.667994 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6df7e75-1be9-4d4a-a6ac-af5d83e580d5" containerName="registry-server"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.668001 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba1678a8-b5a0-491d-9531-a18c9500d4a3" containerName="oauth-openshift"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.668011 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="f63222d6-7d3d-4b06-a32c-517ad8e72492" containerName="registry-server"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.668020 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="65a9f2fe-16b9-4384-b620-227bbf5ed46c" containerName="registry-server"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.668027 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1e522f6-423a-45a7-bb5c-0c26610768d8" containerName="registry-server"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.668488 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.678940 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6f96647944-mtqfl"]
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.679357 4684 generic.go:334] "Generic (PLEG): container finished" podID="ba1678a8-b5a0-491d-9531-a18c9500d4a3" containerID="7531ce962a16ecf766710ee9f292f30bc1538a14192bef97902ff588258a21b5" exitCode=0
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.679396 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" event={"ID":"ba1678a8-b5a0-491d-9531-a18c9500d4a3","Type":"ContainerDied","Data":"7531ce962a16ecf766710ee9f292f30bc1538a14192bef97902ff588258a21b5"}
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.679433 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.679457 4684 scope.go:117] "RemoveContainer" containerID="7531ce962a16ecf766710ee9f292f30bc1538a14192bef97902ff588258a21b5"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.679442 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-kxhdp" event={"ID":"ba1678a8-b5a0-491d-9531-a18c9500d4a3","Type":"ContainerDied","Data":"46eb566b69873e0b43fb211c5170fa67b6be1997bb4c2558bcd8ba1f1fca4011"}
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.710648 4684 scope.go:117] "RemoveContainer" containerID="7531ce962a16ecf766710ee9f292f30bc1538a14192bef97902ff588258a21b5"
Oct 13 13:11:06 crc kubenswrapper[4684]: E1013 13:11:06.711113 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7531ce962a16ecf766710ee9f292f30bc1538a14192bef97902ff588258a21b5\": container with ID starting with 7531ce962a16ecf766710ee9f292f30bc1538a14192bef97902ff588258a21b5 not found: ID does not exist" containerID="7531ce962a16ecf766710ee9f292f30bc1538a14192bef97902ff588258a21b5"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.711179 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7531ce962a16ecf766710ee9f292f30bc1538a14192bef97902ff588258a21b5"} err="failed to get container status \"7531ce962a16ecf766710ee9f292f30bc1538a14192bef97902ff588258a21b5\": rpc error: code = NotFound desc = could not find container \"7531ce962a16ecf766710ee9f292f30bc1538a14192bef97902ff588258a21b5\": container with ID starting with 7531ce962a16ecf766710ee9f292f30bc1538a14192bef97902ff588258a21b5 not found: ID does not exist"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.825214 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-serving-cert\") pod \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") "
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.825280 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-session\") pod \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") "
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.825394 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-login\") pod \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") "
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.825490 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ba1678a8-b5a0-491d-9531-a18c9500d4a3-audit-dir\") pod \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") "
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.825547 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-trusted-ca-bundle\") pod \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") "
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.825579 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-router-certs\") pod \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") "
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.825670 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-cliconfig\") pod \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") "
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.825705 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-ocp-branding-template\") pod \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") "
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.825763 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-audit-policies\") pod \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") "
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.825804 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-error\") pod \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") "
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.825838 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-idp-0-file-data\") pod \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") "
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.825988 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wm5b\" (UniqueName: \"kubernetes.io/projected/ba1678a8-b5a0-491d-9531-a18c9500d4a3-kube-api-access-4wm5b\") pod \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") "
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.826067 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-provider-selection\") pod \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") "
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.827300 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-service-ca\") pod \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\" (UID: \"ba1678a8-b5a0-491d-9531-a18c9500d4a3\") "
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.828135 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.828225 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.828283 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b247a845-bc25-4dd1-9a4a-3e71080ea62f-audit-dir\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.828330 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-user-template-login\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.827322 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "ba1678a8-b5a0-491d-9531-a18c9500d4a3" (UID: "ba1678a8-b5a0-491d-9531-a18c9500d4a3"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.827428 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "ba1678a8-b5a0-491d-9531-a18c9500d4a3" (UID: "ba1678a8-b5a0-491d-9531-a18c9500d4a3"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.827624 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ba1678a8-b5a0-491d-9531-a18c9500d4a3-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "ba1678a8-b5a0-491d-9531-a18c9500d4a3" (UID: "ba1678a8-b5a0-491d-9531-a18c9500d4a3"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.828334 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "ba1678a8-b5a0-491d-9531-a18c9500d4a3" (UID: "ba1678a8-b5a0-491d-9531-a18c9500d4a3"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.828501 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-service-ca\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.828558 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8x6m\" (UniqueName: \"kubernetes.io/projected/b247a845-bc25-4dd1-9a4a-3e71080ea62f-kube-api-access-b8x6m\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.828598 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "ba1678a8-b5a0-491d-9531-a18c9500d4a3" (UID: "ba1678a8-b5a0-491d-9531-a18c9500d4a3"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.828620 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-user-template-error\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.828687 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.828739 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-session\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.828808 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.828863 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-router-certs\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.829012 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.829136 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.829211 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b247a845-bc25-4dd1-9a4a-3e71080ea62f-audit-policies\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.829337 4684 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ba1678a8-b5a0-491d-9531-a18c9500d4a3-audit-dir\") on node \"crc\" DevicePath \"\""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.829369 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.829395 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.829421 4684 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-audit-policies\") on node \"crc\" DevicePath \"\""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.829494 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.833467 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "ba1678a8-b5a0-491d-9531-a18c9500d4a3" (UID: "ba1678a8-b5a0-491d-9531-a18c9500d4a3"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.834371 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "ba1678a8-b5a0-491d-9531-a18c9500d4a3" (UID: "ba1678a8-b5a0-491d-9531-a18c9500d4a3"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.835828 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "ba1678a8-b5a0-491d-9531-a18c9500d4a3" (UID: "ba1678a8-b5a0-491d-9531-a18c9500d4a3"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.836198 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba1678a8-b5a0-491d-9531-a18c9500d4a3-kube-api-access-4wm5b" (OuterVolumeSpecName: "kube-api-access-4wm5b") pod "ba1678a8-b5a0-491d-9531-a18c9500d4a3" (UID: "ba1678a8-b5a0-491d-9531-a18c9500d4a3"). InnerVolumeSpecName "kube-api-access-4wm5b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.836700 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "ba1678a8-b5a0-491d-9531-a18c9500d4a3" (UID: "ba1678a8-b5a0-491d-9531-a18c9500d4a3"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.838593 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "ba1678a8-b5a0-491d-9531-a18c9500d4a3" (UID: "ba1678a8-b5a0-491d-9531-a18c9500d4a3"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.838743 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "ba1678a8-b5a0-491d-9531-a18c9500d4a3" (UID: "ba1678a8-b5a0-491d-9531-a18c9500d4a3"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.838768 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "ba1678a8-b5a0-491d-9531-a18c9500d4a3" (UID: "ba1678a8-b5a0-491d-9531-a18c9500d4a3"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.839158 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "ba1678a8-b5a0-491d-9531-a18c9500d4a3" (UID: "ba1678a8-b5a0-491d-9531-a18c9500d4a3"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.930674 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-user-template-error\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.930785 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.930846 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-session\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.930894 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.930978 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-router-certs\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931025 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931092 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931165 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b247a845-bc25-4dd1-9a4a-3e71080ea62f-audit-policies\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931254 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931316 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931358 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b247a845-bc25-4dd1-9a4a-3e71080ea62f-audit-dir\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931405 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-user-template-login\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931475 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-service-ca\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931523 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8x6m\" (UniqueName: \"kubernetes.io/projected/b247a845-bc25-4dd1-9a4a-3e71080ea62f-kube-api-access-b8x6m\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931626 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931660 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931688 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931719 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931746 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wm5b\" (UniqueName: \"kubernetes.io/projected/ba1678a8-b5a0-491d-9531-a18c9500d4a3-kube-api-access-4wm5b\") on node \"crc\" DevicePath \"\""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931773 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931800 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931825 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.931852 4684 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ba1678a8-b5a0-491d-9531-a18c9500d4a3-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.932187 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b247a845-bc25-4dd1-9a4a-3e71080ea62f-audit-dir\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.934110 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-service-ca\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.934385 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.934494 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b247a845-bc25-4dd1-9a4a-3e71080ea62f-audit-policies\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.935246 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.937108 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.937722 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-user-template-error\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.938205 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.938494 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-session\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.939034 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.939820 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-user-template-login\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl"
Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.940250 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") "
pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl" Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.940794 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b247a845-bc25-4dd1-9a4a-3e71080ea62f-v4-0-config-system-router-certs\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl" Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.961965 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8x6m\" (UniqueName: \"kubernetes.io/projected/b247a845-bc25-4dd1-9a4a-3e71080ea62f-kube-api-access-b8x6m\") pod \"oauth-openshift-6f96647944-mtqfl\" (UID: \"b247a845-bc25-4dd1-9a4a-3e71080ea62f\") " pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl" Oct 13 13:11:06 crc kubenswrapper[4684]: I1013 13:11:06.998889 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl" Oct 13 13:11:07 crc kubenswrapper[4684]: I1013 13:11:07.033716 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-kxhdp"] Oct 13 13:11:07 crc kubenswrapper[4684]: I1013 13:11:07.040852 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-kxhdp"] Oct 13 13:11:07 crc kubenswrapper[4684]: I1013 13:11:07.254862 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6f96647944-mtqfl"] Oct 13 13:11:07 crc kubenswrapper[4684]: I1013 13:11:07.690819 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl" event={"ID":"b247a845-bc25-4dd1-9a4a-3e71080ea62f","Type":"ContainerStarted","Data":"f425e430a3bd9e428cad46f3fc79786212725caf2ac3d6d3f8aae9cc5f8db22c"} Oct 13 13:11:07 crc kubenswrapper[4684]: I1013 13:11:07.690949 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl" event={"ID":"b247a845-bc25-4dd1-9a4a-3e71080ea62f","Type":"ContainerStarted","Data":"b4c7c4bd4a9bdb3c706b6db7a3e3e74d7091fa42b024489994c7459cd70e6c94"} Oct 13 13:11:07 crc kubenswrapper[4684]: I1013 13:11:07.691114 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl" Oct 13 13:11:07 crc kubenswrapper[4684]: I1013 13:11:07.721514 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl" podStartSLOduration=26.721486651 podStartE2EDuration="26.721486651s" podCreationTimestamp="2025-10-13 13:10:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:11:07.719282402 +0000 UTC m=+222.286666482" watchObservedRunningTime="2025-10-13 13:11:07.721486651 +0000 UTC m=+222.288870721" Oct 13 13:11:08 crc kubenswrapper[4684]: I1013 13:11:08.003666 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6f96647944-mtqfl" Oct 13 13:11:08 crc kubenswrapper[4684]: I1013 13:11:08.357818 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba1678a8-b5a0-491d-9531-a18c9500d4a3" 
path="/var/lib/kubelet/pods/ba1678a8-b5a0-491d-9531-a18c9500d4a3/volumes" Oct 13 13:11:18 crc kubenswrapper[4684]: I1013 13:11:18.963760 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mk5s8"] Oct 13 13:11:18 crc kubenswrapper[4684]: I1013 13:11:18.964697 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mk5s8" podUID="607faaf0-b3f3-4ef3-978f-ad99d464f0bf" containerName="registry-server" containerID="cri-o://bc9013d411b31f6577a7f4670aceeeed3c69842437c7ed66085e686664fe75fa" gracePeriod=30 Oct 13 13:11:18 crc kubenswrapper[4684]: I1013 13:11:18.967667 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-726x4"] Oct 13 13:11:18 crc kubenswrapper[4684]: I1013 13:11:18.968052 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-726x4" podUID="a0f0adcd-9aa4-4c97-8a9c-72a654db14f8" containerName="registry-server" containerID="cri-o://0a0e9b5c6b294deb9b9db8f2d11a27ea6f8500f94f2feba7dbfb62259a7ceccd" gracePeriod=30 Oct 13 13:11:18 crc kubenswrapper[4684]: I1013 13:11:18.977381 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tg4h7"] Oct 13 13:11:18 crc kubenswrapper[4684]: I1013 13:11:18.977751 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" podUID="313cf642-fd7b-4e6a-b46a-caa3c76b340d" containerName="marketplace-operator" containerID="cri-o://1eb89c1877da49206273d347d6d0f5fcb00e76e353e04210741cd4d98e3641dc" gracePeriod=30 Oct 13 13:11:18 crc kubenswrapper[4684]: I1013 13:11:18.994550 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8lm6p"] Oct 13 13:11:18 crc kubenswrapper[4684]: I1013 13:11:18.995122 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8lm6p" podUID="4d0943d5-e534-4e02-bda6-bf77c5c39882" containerName="registry-server" containerID="cri-o://506c58ca795929154563d39dbe99dab3df4ac019a4f98c87029f39ee5f5056aa" gracePeriod=30 Oct 13 13:11:18 crc kubenswrapper[4684]: I1013 13:11:18.998974 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6t588"] Oct 13 13:11:18 crc kubenswrapper[4684]: I1013 13:11:18.999727 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6t588" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.001919 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zr84p"] Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.002172 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zr84p" podUID="84d0ce46-2872-4733-8d42-fd1e2680dff3" containerName="registry-server" containerID="cri-o://533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c" gracePeriod=30 Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.020790 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6t588"] Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.084047 4684 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-tg4h7 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" start-of-body= Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.084108 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" podUID="313cf642-fd7b-4e6a-b46a-caa3c76b340d" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.29:8080/healthz\": dial tcp 10.217.0.29:8080: connect: connection refused" Oct 13 13:11:19 crc kubenswrapper[4684]: E1013 13:11:19.188557 4684 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c is running failed: container process not found" containerID="533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c" cmd=["grpc_health_probe","-addr=:50051"] Oct 13 13:11:19 crc kubenswrapper[4684]: E1013 13:11:19.189074 4684 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c is running failed: container process not found" containerID="533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c" cmd=["grpc_health_probe","-addr=:50051"] Oct 13 13:11:19 crc kubenswrapper[4684]: E1013 13:11:19.190019 4684 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c is running failed: container process not found" containerID="533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c" cmd=["grpc_health_probe","-addr=:50051"] Oct 13 13:11:19 crc kubenswrapper[4684]: E1013 13:11:19.190057 4684 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-zr84p" podUID="84d0ce46-2872-4733-8d42-fd1e2680dff3" containerName="registry-server" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.195077 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cc8221bd-07d6-42ed-b1dd-d81881844b60-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6t588\" (UID: \"cc8221bd-07d6-42ed-b1dd-d81881844b60\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t588" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.195126 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhw8w\" (UniqueName: \"kubernetes.io/projected/cc8221bd-07d6-42ed-b1dd-d81881844b60-kube-api-access-nhw8w\") pod \"marketplace-operator-79b997595-6t588\" (UID: \"cc8221bd-07d6-42ed-b1dd-d81881844b60\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t588" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.195168 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/cc8221bd-07d6-42ed-b1dd-d81881844b60-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6t588\" (UID: \"cc8221bd-07d6-42ed-b1dd-d81881844b60\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t588" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.297452 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhw8w\" (UniqueName: \"kubernetes.io/projected/cc8221bd-07d6-42ed-b1dd-d81881844b60-kube-api-access-nhw8w\") pod \"marketplace-operator-79b997595-6t588\" (UID: \"cc8221bd-07d6-42ed-b1dd-d81881844b60\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t588" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.297547 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/cc8221bd-07d6-42ed-b1dd-d81881844b60-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6t588\" (UID: \"cc8221bd-07d6-42ed-b1dd-d81881844b60\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t588" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.297612 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cc8221bd-07d6-42ed-b1dd-d81881844b60-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6t588\" (UID: \"cc8221bd-07d6-42ed-b1dd-d81881844b60\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t588" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.311491 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cc8221bd-07d6-42ed-b1dd-d81881844b60-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6t588\" (UID: \"cc8221bd-07d6-42ed-b1dd-d81881844b60\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t588" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.313562 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/cc8221bd-07d6-42ed-b1dd-d81881844b60-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6t588\" (UID: \"cc8221bd-07d6-42ed-b1dd-d81881844b60\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t588" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.326835 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhw8w\" (UniqueName: 
\"kubernetes.io/projected/cc8221bd-07d6-42ed-b1dd-d81881844b60-kube-api-access-nhw8w\") pod \"marketplace-operator-79b997595-6t588\" (UID: \"cc8221bd-07d6-42ed-b1dd-d81881844b60\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t588" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.459083 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.464962 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.471641 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-726x4" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.485828 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.490288 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.509360 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0943d5-e534-4e02-bda6-bf77c5c39882-catalog-content\") pod \"4d0943d5-e534-4e02-bda6-bf77c5c39882\" (UID: \"4d0943d5-e534-4e02-bda6-bf77c5c39882\") " Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.509438 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84d0ce46-2872-4733-8d42-fd1e2680dff3-catalog-content\") pod \"84d0ce46-2872-4733-8d42-fd1e2680dff3\" (UID: \"84d0ce46-2872-4733-8d42-fd1e2680dff3\") " Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.509500 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-catalog-content\") pod \"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8\" (UID: \"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8\") " Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.509534 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84d0ce46-2872-4733-8d42-fd1e2680dff3-utilities\") pod \"84d0ce46-2872-4733-8d42-fd1e2680dff3\" (UID: \"84d0ce46-2872-4733-8d42-fd1e2680dff3\") " Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.509574 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/313cf642-fd7b-4e6a-b46a-caa3c76b340d-marketplace-trusted-ca\") pod \"313cf642-fd7b-4e6a-b46a-caa3c76b340d\" (UID: \"313cf642-fd7b-4e6a-b46a-caa3c76b340d\") " Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.509605 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0943d5-e534-4e02-bda6-bf77c5c39882-utilities\") pod \"4d0943d5-e534-4e02-bda6-bf77c5c39882\" (UID: \"4d0943d5-e534-4e02-bda6-bf77c5c39882\") " Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.509635 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzgnv\" 
(UniqueName: \"kubernetes.io/projected/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-kube-api-access-wzgnv\") pod \"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8\" (UID: \"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8\") " Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.509669 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwttn\" (UniqueName: \"kubernetes.io/projected/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-kube-api-access-mwttn\") pod \"607faaf0-b3f3-4ef3-978f-ad99d464f0bf\" (UID: \"607faaf0-b3f3-4ef3-978f-ad99d464f0bf\") " Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.509700 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g46s7\" (UniqueName: \"kubernetes.io/projected/4d0943d5-e534-4e02-bda6-bf77c5c39882-kube-api-access-g46s7\") pod \"4d0943d5-e534-4e02-bda6-bf77c5c39882\" (UID: \"4d0943d5-e534-4e02-bda6-bf77c5c39882\") " Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.509740 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-utilities\") pod \"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8\" (UID: \"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8\") " Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.509766 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x77mc\" (UniqueName: \"kubernetes.io/projected/84d0ce46-2872-4733-8d42-fd1e2680dff3-kube-api-access-x77mc\") pod \"84d0ce46-2872-4733-8d42-fd1e2680dff3\" (UID: \"84d0ce46-2872-4733-8d42-fd1e2680dff3\") " Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.509790 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4dhh\" (UniqueName: \"kubernetes.io/projected/313cf642-fd7b-4e6a-b46a-caa3c76b340d-kube-api-access-b4dhh\") pod \"313cf642-fd7b-4e6a-b46a-caa3c76b340d\" (UID: \"313cf642-fd7b-4e6a-b46a-caa3c76b340d\") " Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.509815 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-utilities\") pod \"607faaf0-b3f3-4ef3-978f-ad99d464f0bf\" (UID: \"607faaf0-b3f3-4ef3-978f-ad99d464f0bf\") " Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.509838 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-catalog-content\") pod \"607faaf0-b3f3-4ef3-978f-ad99d464f0bf\" (UID: \"607faaf0-b3f3-4ef3-978f-ad99d464f0bf\") " Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.509868 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/313cf642-fd7b-4e6a-b46a-caa3c76b340d-marketplace-operator-metrics\") pod \"313cf642-fd7b-4e6a-b46a-caa3c76b340d\" (UID: \"313cf642-fd7b-4e6a-b46a-caa3c76b340d\") " Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.512979 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-utilities" (OuterVolumeSpecName: "utilities") pod "607faaf0-b3f3-4ef3-978f-ad99d464f0bf" (UID: "607faaf0-b3f3-4ef3-978f-ad99d464f0bf"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.513670 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/313cf642-fd7b-4e6a-b46a-caa3c76b340d-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "313cf642-fd7b-4e6a-b46a-caa3c76b340d" (UID: "313cf642-fd7b-4e6a-b46a-caa3c76b340d"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.513706 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-kube-api-access-wzgnv" (OuterVolumeSpecName: "kube-api-access-wzgnv") pod "a0f0adcd-9aa4-4c97-8a9c-72a654db14f8" (UID: "a0f0adcd-9aa4-4c97-8a9c-72a654db14f8"). InnerVolumeSpecName "kube-api-access-wzgnv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.513796 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d0943d5-e534-4e02-bda6-bf77c5c39882-kube-api-access-g46s7" (OuterVolumeSpecName: "kube-api-access-g46s7") pod "4d0943d5-e534-4e02-bda6-bf77c5c39882" (UID: "4d0943d5-e534-4e02-bda6-bf77c5c39882"). InnerVolumeSpecName "kube-api-access-g46s7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.514477 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d0943d5-e534-4e02-bda6-bf77c5c39882-utilities" (OuterVolumeSpecName: "utilities") pod "4d0943d5-e534-4e02-bda6-bf77c5c39882" (UID: "4d0943d5-e534-4e02-bda6-bf77c5c39882"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.516515 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-utilities" (OuterVolumeSpecName: "utilities") pod "a0f0adcd-9aa4-4c97-8a9c-72a654db14f8" (UID: "a0f0adcd-9aa4-4c97-8a9c-72a654db14f8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.516623 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84d0ce46-2872-4733-8d42-fd1e2680dff3-utilities" (OuterVolumeSpecName: "utilities") pod "84d0ce46-2872-4733-8d42-fd1e2680dff3" (UID: "84d0ce46-2872-4733-8d42-fd1e2680dff3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.523305 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-kube-api-access-mwttn" (OuterVolumeSpecName: "kube-api-access-mwttn") pod "607faaf0-b3f3-4ef3-978f-ad99d464f0bf" (UID: "607faaf0-b3f3-4ef3-978f-ad99d464f0bf"). InnerVolumeSpecName "kube-api-access-mwttn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.524624 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84d0ce46-2872-4733-8d42-fd1e2680dff3-kube-api-access-x77mc" (OuterVolumeSpecName: "kube-api-access-x77mc") pod "84d0ce46-2872-4733-8d42-fd1e2680dff3" (UID: "84d0ce46-2872-4733-8d42-fd1e2680dff3"). 
InnerVolumeSpecName "kube-api-access-x77mc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.525456 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/313cf642-fd7b-4e6a-b46a-caa3c76b340d-kube-api-access-b4dhh" (OuterVolumeSpecName: "kube-api-access-b4dhh") pod "313cf642-fd7b-4e6a-b46a-caa3c76b340d" (UID: "313cf642-fd7b-4e6a-b46a-caa3c76b340d"). InnerVolumeSpecName "kube-api-access-b4dhh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.526677 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d0943d5-e534-4e02-bda6-bf77c5c39882-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d0943d5-e534-4e02-bda6-bf77c5c39882" (UID: "4d0943d5-e534-4e02-bda6-bf77c5c39882"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.527407 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/313cf642-fd7b-4e6a-b46a-caa3c76b340d-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "313cf642-fd7b-4e6a-b46a-caa3c76b340d" (UID: "313cf642-fd7b-4e6a-b46a-caa3c76b340d"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.606685 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a0f0adcd-9aa4-4c97-8a9c-72a654db14f8" (UID: "a0f0adcd-9aa4-4c97-8a9c-72a654db14f8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.607355 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "607faaf0-b3f3-4ef3-978f-ad99d464f0bf" (UID: "607faaf0-b3f3-4ef3-978f-ad99d464f0bf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.613673 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.613732 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84d0ce46-2872-4733-8d42-fd1e2680dff3-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.613742 4684 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/313cf642-fd7b-4e6a-b46a-caa3c76b340d-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.613770 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0943d5-e534-4e02-bda6-bf77c5c39882-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.613779 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzgnv\" (UniqueName: \"kubernetes.io/projected/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-kube-api-access-wzgnv\") on node \"crc\" DevicePath \"\"" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.613827 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwttn\" (UniqueName: \"kubernetes.io/projected/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-kube-api-access-mwttn\") on node \"crc\" DevicePath \"\"" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.613837 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g46s7\" (UniqueName: \"kubernetes.io/projected/4d0943d5-e534-4e02-bda6-bf77c5c39882-kube-api-access-g46s7\") on node \"crc\" DevicePath \"\"" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.613845 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x77mc\" (UniqueName: \"kubernetes.io/projected/84d0ce46-2872-4733-8d42-fd1e2680dff3-kube-api-access-x77mc\") on node \"crc\" DevicePath \"\"" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.613854 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.613862 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4dhh\" (UniqueName: \"kubernetes.io/projected/313cf642-fd7b-4e6a-b46a-caa3c76b340d-kube-api-access-b4dhh\") on node \"crc\" DevicePath \"\"" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.613873 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.613882 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/607faaf0-b3f3-4ef3-978f-ad99d464f0bf-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.613893 4684 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/313cf642-fd7b-4e6a-b46a-caa3c76b340d-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.613920 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0943d5-e534-4e02-bda6-bf77c5c39882-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.622687 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6t588" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.634232 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84d0ce46-2872-4733-8d42-fd1e2680dff3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "84d0ce46-2872-4733-8d42-fd1e2680dff3" (UID: "84d0ce46-2872-4733-8d42-fd1e2680dff3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.715230 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84d0ce46-2872-4733-8d42-fd1e2680dff3-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.753476 4684 generic.go:334] "Generic (PLEG): container finished" podID="4d0943d5-e534-4e02-bda6-bf77c5c39882" containerID="506c58ca795929154563d39dbe99dab3df4ac019a4f98c87029f39ee5f5056aa" exitCode=0 Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.753580 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8lm6p" event={"ID":"4d0943d5-e534-4e02-bda6-bf77c5c39882","Type":"ContainerDied","Data":"506c58ca795929154563d39dbe99dab3df4ac019a4f98c87029f39ee5f5056aa"} Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.753614 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8lm6p" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.753635 4684 scope.go:117] "RemoveContainer" containerID="506c58ca795929154563d39dbe99dab3df4ac019a4f98c87029f39ee5f5056aa" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.753619 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8lm6p" event={"ID":"4d0943d5-e534-4e02-bda6-bf77c5c39882","Type":"ContainerDied","Data":"789e2f6b47cc90662b27f005d11b48763d7591c1e51667916a825423c0b7c10e"} Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.756720 4684 generic.go:334] "Generic (PLEG): container finished" podID="84d0ce46-2872-4733-8d42-fd1e2680dff3" containerID="533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c" exitCode=0 Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.756787 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr84p" event={"ID":"84d0ce46-2872-4733-8d42-fd1e2680dff3","Type":"ContainerDied","Data":"533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c"} Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.756811 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr84p" event={"ID":"84d0ce46-2872-4733-8d42-fd1e2680dff3","Type":"ContainerDied","Data":"9a508048f41f9348bc92393a534bd8df861c7feb87f2dbe00bda4babcb0bfc90"} Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.756835 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zr84p" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.758410 4684 generic.go:334] "Generic (PLEG): container finished" podID="313cf642-fd7b-4e6a-b46a-caa3c76b340d" containerID="1eb89c1877da49206273d347d6d0f5fcb00e76e353e04210741cd4d98e3641dc" exitCode=0 Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.758470 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" event={"ID":"313cf642-fd7b-4e6a-b46a-caa3c76b340d","Type":"ContainerDied","Data":"1eb89c1877da49206273d347d6d0f5fcb00e76e353e04210741cd4d98e3641dc"} Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.758492 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" event={"ID":"313cf642-fd7b-4e6a-b46a-caa3c76b340d","Type":"ContainerDied","Data":"e02cecd09c246b87d569a3a3caeea5b0f5f9cbcdfa91120f7bca93af8fcb8989"} Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.758598 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tg4h7" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.768481 4684 generic.go:334] "Generic (PLEG): container finished" podID="607faaf0-b3f3-4ef3-978f-ad99d464f0bf" containerID="bc9013d411b31f6577a7f4670aceeeed3c69842437c7ed66085e686664fe75fa" exitCode=0 Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.768613 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mk5s8" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.769159 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mk5s8" event={"ID":"607faaf0-b3f3-4ef3-978f-ad99d464f0bf","Type":"ContainerDied","Data":"bc9013d411b31f6577a7f4670aceeeed3c69842437c7ed66085e686664fe75fa"} Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.769495 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mk5s8" event={"ID":"607faaf0-b3f3-4ef3-978f-ad99d464f0bf","Type":"ContainerDied","Data":"29dcff4846b3c3f70b6ec7eedbae7bc09875d7f529824c3e667a05fb548eef3f"} Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.774612 4684 generic.go:334] "Generic (PLEG): container finished" podID="a0f0adcd-9aa4-4c97-8a9c-72a654db14f8" containerID="0a0e9b5c6b294deb9b9db8f2d11a27ea6f8500f94f2feba7dbfb62259a7ceccd" exitCode=0 Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.774656 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-726x4" event={"ID":"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8","Type":"ContainerDied","Data":"0a0e9b5c6b294deb9b9db8f2d11a27ea6f8500f94f2feba7dbfb62259a7ceccd"} Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.774684 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-726x4" event={"ID":"a0f0adcd-9aa4-4c97-8a9c-72a654db14f8","Type":"ContainerDied","Data":"f96b3c703eb4db85854b9f4577ac4be74cd2baff652cf4afe52d2c2ff36e68e3"} Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.774863 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-726x4" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.780552 4684 scope.go:117] "RemoveContainer" containerID="0038f4f89282e1899a9301f3c32a9cd8fa5169f97a35a1144b2a09fa6ef3b0e5" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.803434 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8lm6p"] Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.807439 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8lm6p"] Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.812441 4684 scope.go:117] "RemoveContainer" containerID="2e64a6602d3aa8dd04cf7670783dabbd60461c0c7149a446ad84bad42409db88" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.818098 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6t588"] Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.823453 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tg4h7"] Oct 13 13:11:19 crc kubenswrapper[4684]: W1013 13:11:19.824733 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc8221bd_07d6_42ed_b1dd_d81881844b60.slice/crio-696529382c1e45bbbd370380faaff6dd0af27bfe2b4473368c558bcc5357d0ad WatchSource:0}: Error finding container 696529382c1e45bbbd370380faaff6dd0af27bfe2b4473368c558bcc5357d0ad: Status 404 returned error can't find the container with id 696529382c1e45bbbd370380faaff6dd0af27bfe2b4473368c558bcc5357d0ad Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.832073 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-marketplace/marketplace-operator-79b997595-tg4h7"] Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.835522 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mk5s8"] Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.840466 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mk5s8"] Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.847302 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zr84p"] Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.853931 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zr84p"] Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.856187 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-726x4"] Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.856553 4684 scope.go:117] "RemoveContainer" containerID="506c58ca795929154563d39dbe99dab3df4ac019a4f98c87029f39ee5f5056aa" Oct 13 13:11:19 crc kubenswrapper[4684]: E1013 13:11:19.857106 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"506c58ca795929154563d39dbe99dab3df4ac019a4f98c87029f39ee5f5056aa\": container with ID starting with 506c58ca795929154563d39dbe99dab3df4ac019a4f98c87029f39ee5f5056aa not found: ID does not exist" containerID="506c58ca795929154563d39dbe99dab3df4ac019a4f98c87029f39ee5f5056aa" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.857158 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"506c58ca795929154563d39dbe99dab3df4ac019a4f98c87029f39ee5f5056aa"} err="failed to get container status \"506c58ca795929154563d39dbe99dab3df4ac019a4f98c87029f39ee5f5056aa\": rpc error: code = NotFound desc = could not find container \"506c58ca795929154563d39dbe99dab3df4ac019a4f98c87029f39ee5f5056aa\": container with ID starting with 506c58ca795929154563d39dbe99dab3df4ac019a4f98c87029f39ee5f5056aa not found: ID does not exist" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.857194 4684 scope.go:117] "RemoveContainer" containerID="0038f4f89282e1899a9301f3c32a9cd8fa5169f97a35a1144b2a09fa6ef3b0e5" Oct 13 13:11:19 crc kubenswrapper[4684]: E1013 13:11:19.857680 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0038f4f89282e1899a9301f3c32a9cd8fa5169f97a35a1144b2a09fa6ef3b0e5\": container with ID starting with 0038f4f89282e1899a9301f3c32a9cd8fa5169f97a35a1144b2a09fa6ef3b0e5 not found: ID does not exist" containerID="0038f4f89282e1899a9301f3c32a9cd8fa5169f97a35a1144b2a09fa6ef3b0e5" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.857708 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0038f4f89282e1899a9301f3c32a9cd8fa5169f97a35a1144b2a09fa6ef3b0e5"} err="failed to get container status \"0038f4f89282e1899a9301f3c32a9cd8fa5169f97a35a1144b2a09fa6ef3b0e5\": rpc error: code = NotFound desc = could not find container \"0038f4f89282e1899a9301f3c32a9cd8fa5169f97a35a1144b2a09fa6ef3b0e5\": container with ID starting with 0038f4f89282e1899a9301f3c32a9cd8fa5169f97a35a1144b2a09fa6ef3b0e5 not found: ID does not exist" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.857731 4684 scope.go:117] "RemoveContainer" 
containerID="2e64a6602d3aa8dd04cf7670783dabbd60461c0c7149a446ad84bad42409db88" Oct 13 13:11:19 crc kubenswrapper[4684]: E1013 13:11:19.858079 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e64a6602d3aa8dd04cf7670783dabbd60461c0c7149a446ad84bad42409db88\": container with ID starting with 2e64a6602d3aa8dd04cf7670783dabbd60461c0c7149a446ad84bad42409db88 not found: ID does not exist" containerID="2e64a6602d3aa8dd04cf7670783dabbd60461c0c7149a446ad84bad42409db88" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.858163 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e64a6602d3aa8dd04cf7670783dabbd60461c0c7149a446ad84bad42409db88"} err="failed to get container status \"2e64a6602d3aa8dd04cf7670783dabbd60461c0c7149a446ad84bad42409db88\": rpc error: code = NotFound desc = could not find container \"2e64a6602d3aa8dd04cf7670783dabbd60461c0c7149a446ad84bad42409db88\": container with ID starting with 2e64a6602d3aa8dd04cf7670783dabbd60461c0c7149a446ad84bad42409db88 not found: ID does not exist" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.858272 4684 scope.go:117] "RemoveContainer" containerID="533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.860674 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-726x4"] Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.878408 4684 scope.go:117] "RemoveContainer" containerID="f446d6964b0ca17322c54d6c4cffa8bf52307805c2afecfa9096c2c18042ffbb" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.892521 4684 scope.go:117] "RemoveContainer" containerID="363d4e3c54dd6e76052c55f35708cae1aa09b71df83b718dea7acbfa43dbc374" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.921244 4684 scope.go:117] "RemoveContainer" containerID="533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c" Oct 13 13:11:19 crc kubenswrapper[4684]: E1013 13:11:19.923985 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c\": container with ID starting with 533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c not found: ID does not exist" containerID="533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.924025 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c"} err="failed to get container status \"533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c\": rpc error: code = NotFound desc = could not find container \"533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c\": container with ID starting with 533fe4461cfcad445dc2ebc7f1c86db6c63a584dadbd628821eea378ed35f15c not found: ID does not exist" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.924048 4684 scope.go:117] "RemoveContainer" containerID="f446d6964b0ca17322c54d6c4cffa8bf52307805c2afecfa9096c2c18042ffbb" Oct 13 13:11:19 crc kubenswrapper[4684]: E1013 13:11:19.924275 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f446d6964b0ca17322c54d6c4cffa8bf52307805c2afecfa9096c2c18042ffbb\": container with ID starting with 
f446d6964b0ca17322c54d6c4cffa8bf52307805c2afecfa9096c2c18042ffbb not found: ID does not exist" containerID="f446d6964b0ca17322c54d6c4cffa8bf52307805c2afecfa9096c2c18042ffbb" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.924300 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f446d6964b0ca17322c54d6c4cffa8bf52307805c2afecfa9096c2c18042ffbb"} err="failed to get container status \"f446d6964b0ca17322c54d6c4cffa8bf52307805c2afecfa9096c2c18042ffbb\": rpc error: code = NotFound desc = could not find container \"f446d6964b0ca17322c54d6c4cffa8bf52307805c2afecfa9096c2c18042ffbb\": container with ID starting with f446d6964b0ca17322c54d6c4cffa8bf52307805c2afecfa9096c2c18042ffbb not found: ID does not exist" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.924317 4684 scope.go:117] "RemoveContainer" containerID="363d4e3c54dd6e76052c55f35708cae1aa09b71df83b718dea7acbfa43dbc374" Oct 13 13:11:19 crc kubenswrapper[4684]: E1013 13:11:19.924535 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"363d4e3c54dd6e76052c55f35708cae1aa09b71df83b718dea7acbfa43dbc374\": container with ID starting with 363d4e3c54dd6e76052c55f35708cae1aa09b71df83b718dea7acbfa43dbc374 not found: ID does not exist" containerID="363d4e3c54dd6e76052c55f35708cae1aa09b71df83b718dea7acbfa43dbc374" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.924569 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"363d4e3c54dd6e76052c55f35708cae1aa09b71df83b718dea7acbfa43dbc374"} err="failed to get container status \"363d4e3c54dd6e76052c55f35708cae1aa09b71df83b718dea7acbfa43dbc374\": rpc error: code = NotFound desc = could not find container \"363d4e3c54dd6e76052c55f35708cae1aa09b71df83b718dea7acbfa43dbc374\": container with ID starting with 363d4e3c54dd6e76052c55f35708cae1aa09b71df83b718dea7acbfa43dbc374 not found: ID does not exist" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.924584 4684 scope.go:117] "RemoveContainer" containerID="1eb89c1877da49206273d347d6d0f5fcb00e76e353e04210741cd4d98e3641dc" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.942194 4684 scope.go:117] "RemoveContainer" containerID="1eb89c1877da49206273d347d6d0f5fcb00e76e353e04210741cd4d98e3641dc" Oct 13 13:11:19 crc kubenswrapper[4684]: E1013 13:11:19.942604 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1eb89c1877da49206273d347d6d0f5fcb00e76e353e04210741cd4d98e3641dc\": container with ID starting with 1eb89c1877da49206273d347d6d0f5fcb00e76e353e04210741cd4d98e3641dc not found: ID does not exist" containerID="1eb89c1877da49206273d347d6d0f5fcb00e76e353e04210741cd4d98e3641dc" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.942638 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1eb89c1877da49206273d347d6d0f5fcb00e76e353e04210741cd4d98e3641dc"} err="failed to get container status \"1eb89c1877da49206273d347d6d0f5fcb00e76e353e04210741cd4d98e3641dc\": rpc error: code = NotFound desc = could not find container \"1eb89c1877da49206273d347d6d0f5fcb00e76e353e04210741cd4d98e3641dc\": container with ID starting with 1eb89c1877da49206273d347d6d0f5fcb00e76e353e04210741cd4d98e3641dc not found: ID does not exist" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.942718 4684 scope.go:117] "RemoveContainer" 
containerID="bc9013d411b31f6577a7f4670aceeeed3c69842437c7ed66085e686664fe75fa" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.962023 4684 scope.go:117] "RemoveContainer" containerID="3d8ebf38442c8be668ca8016a0ce05fd3680e321a8e063f1567e7722eca768c9" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.978954 4684 scope.go:117] "RemoveContainer" containerID="0d7af60cac97a178a1d5b1053c284a26f25ac2d1eb730555fb3cb91b680a09cd" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.993296 4684 scope.go:117] "RemoveContainer" containerID="bc9013d411b31f6577a7f4670aceeeed3c69842437c7ed66085e686664fe75fa" Oct 13 13:11:19 crc kubenswrapper[4684]: E1013 13:11:19.993690 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc9013d411b31f6577a7f4670aceeeed3c69842437c7ed66085e686664fe75fa\": container with ID starting with bc9013d411b31f6577a7f4670aceeeed3c69842437c7ed66085e686664fe75fa not found: ID does not exist" containerID="bc9013d411b31f6577a7f4670aceeeed3c69842437c7ed66085e686664fe75fa" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.993735 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc9013d411b31f6577a7f4670aceeeed3c69842437c7ed66085e686664fe75fa"} err="failed to get container status \"bc9013d411b31f6577a7f4670aceeeed3c69842437c7ed66085e686664fe75fa\": rpc error: code = NotFound desc = could not find container \"bc9013d411b31f6577a7f4670aceeeed3c69842437c7ed66085e686664fe75fa\": container with ID starting with bc9013d411b31f6577a7f4670aceeeed3c69842437c7ed66085e686664fe75fa not found: ID does not exist" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.993767 4684 scope.go:117] "RemoveContainer" containerID="3d8ebf38442c8be668ca8016a0ce05fd3680e321a8e063f1567e7722eca768c9" Oct 13 13:11:19 crc kubenswrapper[4684]: E1013 13:11:19.994037 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d8ebf38442c8be668ca8016a0ce05fd3680e321a8e063f1567e7722eca768c9\": container with ID starting with 3d8ebf38442c8be668ca8016a0ce05fd3680e321a8e063f1567e7722eca768c9 not found: ID does not exist" containerID="3d8ebf38442c8be668ca8016a0ce05fd3680e321a8e063f1567e7722eca768c9" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.994070 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d8ebf38442c8be668ca8016a0ce05fd3680e321a8e063f1567e7722eca768c9"} err="failed to get container status \"3d8ebf38442c8be668ca8016a0ce05fd3680e321a8e063f1567e7722eca768c9\": rpc error: code = NotFound desc = could not find container \"3d8ebf38442c8be668ca8016a0ce05fd3680e321a8e063f1567e7722eca768c9\": container with ID starting with 3d8ebf38442c8be668ca8016a0ce05fd3680e321a8e063f1567e7722eca768c9 not found: ID does not exist" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.994088 4684 scope.go:117] "RemoveContainer" containerID="0d7af60cac97a178a1d5b1053c284a26f25ac2d1eb730555fb3cb91b680a09cd" Oct 13 13:11:19 crc kubenswrapper[4684]: E1013 13:11:19.994340 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d7af60cac97a178a1d5b1053c284a26f25ac2d1eb730555fb3cb91b680a09cd\": container with ID starting with 0d7af60cac97a178a1d5b1053c284a26f25ac2d1eb730555fb3cb91b680a09cd not found: ID does not exist" containerID="0d7af60cac97a178a1d5b1053c284a26f25ac2d1eb730555fb3cb91b680a09cd" 
Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.994372 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d7af60cac97a178a1d5b1053c284a26f25ac2d1eb730555fb3cb91b680a09cd"} err="failed to get container status \"0d7af60cac97a178a1d5b1053c284a26f25ac2d1eb730555fb3cb91b680a09cd\": rpc error: code = NotFound desc = could not find container \"0d7af60cac97a178a1d5b1053c284a26f25ac2d1eb730555fb3cb91b680a09cd\": container with ID starting with 0d7af60cac97a178a1d5b1053c284a26f25ac2d1eb730555fb3cb91b680a09cd not found: ID does not exist" Oct 13 13:11:19 crc kubenswrapper[4684]: I1013 13:11:19.994389 4684 scope.go:117] "RemoveContainer" containerID="0a0e9b5c6b294deb9b9db8f2d11a27ea6f8500f94f2feba7dbfb62259a7ceccd" Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.008424 4684 scope.go:117] "RemoveContainer" containerID="759e13f1cff5cd7deaf8cbc060e0c141f756b4df77e930cbc79e52755d7a7d53" Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.032313 4684 scope.go:117] "RemoveContainer" containerID="2bd12aabff5c5ccbee1c3517581729c99fcdfb23db9e46a80956916ed1230650" Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.049261 4684 scope.go:117] "RemoveContainer" containerID="0a0e9b5c6b294deb9b9db8f2d11a27ea6f8500f94f2feba7dbfb62259a7ceccd" Oct 13 13:11:20 crc kubenswrapper[4684]: E1013 13:11:20.049738 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a0e9b5c6b294deb9b9db8f2d11a27ea6f8500f94f2feba7dbfb62259a7ceccd\": container with ID starting with 0a0e9b5c6b294deb9b9db8f2d11a27ea6f8500f94f2feba7dbfb62259a7ceccd not found: ID does not exist" containerID="0a0e9b5c6b294deb9b9db8f2d11a27ea6f8500f94f2feba7dbfb62259a7ceccd" Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.049771 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a0e9b5c6b294deb9b9db8f2d11a27ea6f8500f94f2feba7dbfb62259a7ceccd"} err="failed to get container status \"0a0e9b5c6b294deb9b9db8f2d11a27ea6f8500f94f2feba7dbfb62259a7ceccd\": rpc error: code = NotFound desc = could not find container \"0a0e9b5c6b294deb9b9db8f2d11a27ea6f8500f94f2feba7dbfb62259a7ceccd\": container with ID starting with 0a0e9b5c6b294deb9b9db8f2d11a27ea6f8500f94f2feba7dbfb62259a7ceccd not found: ID does not exist" Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.049793 4684 scope.go:117] "RemoveContainer" containerID="759e13f1cff5cd7deaf8cbc060e0c141f756b4df77e930cbc79e52755d7a7d53" Oct 13 13:11:20 crc kubenswrapper[4684]: E1013 13:11:20.050235 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"759e13f1cff5cd7deaf8cbc060e0c141f756b4df77e930cbc79e52755d7a7d53\": container with ID starting with 759e13f1cff5cd7deaf8cbc060e0c141f756b4df77e930cbc79e52755d7a7d53 not found: ID does not exist" containerID="759e13f1cff5cd7deaf8cbc060e0c141f756b4df77e930cbc79e52755d7a7d53" Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.050278 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"759e13f1cff5cd7deaf8cbc060e0c141f756b4df77e930cbc79e52755d7a7d53"} err="failed to get container status \"759e13f1cff5cd7deaf8cbc060e0c141f756b4df77e930cbc79e52755d7a7d53\": rpc error: code = NotFound desc = could not find container \"759e13f1cff5cd7deaf8cbc060e0c141f756b4df77e930cbc79e52755d7a7d53\": container with ID starting with 
759e13f1cff5cd7deaf8cbc060e0c141f756b4df77e930cbc79e52755d7a7d53 not found: ID does not exist" Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.050298 4684 scope.go:117] "RemoveContainer" containerID="2bd12aabff5c5ccbee1c3517581729c99fcdfb23db9e46a80956916ed1230650" Oct 13 13:11:20 crc kubenswrapper[4684]: E1013 13:11:20.050723 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bd12aabff5c5ccbee1c3517581729c99fcdfb23db9e46a80956916ed1230650\": container with ID starting with 2bd12aabff5c5ccbee1c3517581729c99fcdfb23db9e46a80956916ed1230650 not found: ID does not exist" containerID="2bd12aabff5c5ccbee1c3517581729c99fcdfb23db9e46a80956916ed1230650" Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.050751 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bd12aabff5c5ccbee1c3517581729c99fcdfb23db9e46a80956916ed1230650"} err="failed to get container status \"2bd12aabff5c5ccbee1c3517581729c99fcdfb23db9e46a80956916ed1230650\": rpc error: code = NotFound desc = could not find container \"2bd12aabff5c5ccbee1c3517581729c99fcdfb23db9e46a80956916ed1230650\": container with ID starting with 2bd12aabff5c5ccbee1c3517581729c99fcdfb23db9e46a80956916ed1230650 not found: ID does not exist" Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.361068 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="313cf642-fd7b-4e6a-b46a-caa3c76b340d" path="/var/lib/kubelet/pods/313cf642-fd7b-4e6a-b46a-caa3c76b340d/volumes" Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.361965 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d0943d5-e534-4e02-bda6-bf77c5c39882" path="/var/lib/kubelet/pods/4d0943d5-e534-4e02-bda6-bf77c5c39882/volumes" Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.362729 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="607faaf0-b3f3-4ef3-978f-ad99d464f0bf" path="/var/lib/kubelet/pods/607faaf0-b3f3-4ef3-978f-ad99d464f0bf/volumes" Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.363947 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84d0ce46-2872-4733-8d42-fd1e2680dff3" path="/var/lib/kubelet/pods/84d0ce46-2872-4733-8d42-fd1e2680dff3/volumes" Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.364626 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0f0adcd-9aa4-4c97-8a9c-72a654db14f8" path="/var/lib/kubelet/pods/a0f0adcd-9aa4-4c97-8a9c-72a654db14f8/volumes" Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.782633 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6t588" event={"ID":"cc8221bd-07d6-42ed-b1dd-d81881844b60","Type":"ContainerStarted","Data":"190647d445198a07de532fc9e3af0c2b9709a304b49434fd4d4afaa2cceb561e"} Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.782705 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6t588" event={"ID":"cc8221bd-07d6-42ed-b1dd-d81881844b60","Type":"ContainerStarted","Data":"696529382c1e45bbbd370380faaff6dd0af27bfe2b4473368c558bcc5357d0ad"} Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.782815 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-6t588" Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.787234 4684 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-6t588" Oct 13 13:11:20 crc kubenswrapper[4684]: I1013 13:11:20.803876 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-6t588" podStartSLOduration=2.803854827 podStartE2EDuration="2.803854827s" podCreationTimestamp="2025-10-13 13:11:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:11:20.800872964 +0000 UTC m=+235.368257054" watchObservedRunningTime="2025-10-13 13:11:20.803854827 +0000 UTC m=+235.371238897" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.195684 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-h5qdd"] Oct 13 13:11:21 crc kubenswrapper[4684]: E1013 13:11:21.196104 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="607faaf0-b3f3-4ef3-978f-ad99d464f0bf" containerName="extract-content" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196122 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="607faaf0-b3f3-4ef3-978f-ad99d464f0bf" containerName="extract-content" Oct 13 13:11:21 crc kubenswrapper[4684]: E1013 13:11:21.196137 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d0943d5-e534-4e02-bda6-bf77c5c39882" containerName="registry-server" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196147 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d0943d5-e534-4e02-bda6-bf77c5c39882" containerName="registry-server" Oct 13 13:11:21 crc kubenswrapper[4684]: E1013 13:11:21.196161 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84d0ce46-2872-4733-8d42-fd1e2680dff3" containerName="extract-content" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196172 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="84d0ce46-2872-4733-8d42-fd1e2680dff3" containerName="extract-content" Oct 13 13:11:21 crc kubenswrapper[4684]: E1013 13:11:21.196186 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84d0ce46-2872-4733-8d42-fd1e2680dff3" containerName="extract-utilities" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196195 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="84d0ce46-2872-4733-8d42-fd1e2680dff3" containerName="extract-utilities" Oct 13 13:11:21 crc kubenswrapper[4684]: E1013 13:11:21.196211 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0f0adcd-9aa4-4c97-8a9c-72a654db14f8" containerName="extract-utilities" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196219 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0f0adcd-9aa4-4c97-8a9c-72a654db14f8" containerName="extract-utilities" Oct 13 13:11:21 crc kubenswrapper[4684]: E1013 13:11:21.196229 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d0943d5-e534-4e02-bda6-bf77c5c39882" containerName="extract-content" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196240 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d0943d5-e534-4e02-bda6-bf77c5c39882" containerName="extract-content" Oct 13 13:11:21 crc kubenswrapper[4684]: E1013 13:11:21.196255 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d0943d5-e534-4e02-bda6-bf77c5c39882" containerName="extract-utilities" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196263 4684 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="4d0943d5-e534-4e02-bda6-bf77c5c39882" containerName="extract-utilities" Oct 13 13:11:21 crc kubenswrapper[4684]: E1013 13:11:21.196281 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="607faaf0-b3f3-4ef3-978f-ad99d464f0bf" containerName="extract-utilities" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196291 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="607faaf0-b3f3-4ef3-978f-ad99d464f0bf" containerName="extract-utilities" Oct 13 13:11:21 crc kubenswrapper[4684]: E1013 13:11:21.196300 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84d0ce46-2872-4733-8d42-fd1e2680dff3" containerName="registry-server" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196309 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="84d0ce46-2872-4733-8d42-fd1e2680dff3" containerName="registry-server" Oct 13 13:11:21 crc kubenswrapper[4684]: E1013 13:11:21.196326 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="607faaf0-b3f3-4ef3-978f-ad99d464f0bf" containerName="registry-server" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196334 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="607faaf0-b3f3-4ef3-978f-ad99d464f0bf" containerName="registry-server" Oct 13 13:11:21 crc kubenswrapper[4684]: E1013 13:11:21.196346 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0f0adcd-9aa4-4c97-8a9c-72a654db14f8" containerName="extract-content" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196356 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0f0adcd-9aa4-4c97-8a9c-72a654db14f8" containerName="extract-content" Oct 13 13:11:21 crc kubenswrapper[4684]: E1013 13:11:21.196371 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0f0adcd-9aa4-4c97-8a9c-72a654db14f8" containerName="registry-server" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196380 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0f0adcd-9aa4-4c97-8a9c-72a654db14f8" containerName="registry-server" Oct 13 13:11:21 crc kubenswrapper[4684]: E1013 13:11:21.196390 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="313cf642-fd7b-4e6a-b46a-caa3c76b340d" containerName="marketplace-operator" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196398 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="313cf642-fd7b-4e6a-b46a-caa3c76b340d" containerName="marketplace-operator" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196592 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="313cf642-fd7b-4e6a-b46a-caa3c76b340d" containerName="marketplace-operator" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196605 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0f0adcd-9aa4-4c97-8a9c-72a654db14f8" containerName="registry-server" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196622 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="607faaf0-b3f3-4ef3-978f-ad99d464f0bf" containerName="registry-server" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196640 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="84d0ce46-2872-4733-8d42-fd1e2680dff3" containerName="registry-server" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.196655 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d0943d5-e534-4e02-bda6-bf77c5c39882" containerName="registry-server" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 
13:11:21.198809 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5qdd" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.201495 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.207547 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5qdd"] Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.235587 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fc769db-2b2f-431c-bef7-1b3c46fe628c-catalog-content\") pod \"redhat-marketplace-h5qdd\" (UID: \"4fc769db-2b2f-431c-bef7-1b3c46fe628c\") " pod="openshift-marketplace/redhat-marketplace-h5qdd" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.235653 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fc769db-2b2f-431c-bef7-1b3c46fe628c-utilities\") pod \"redhat-marketplace-h5qdd\" (UID: \"4fc769db-2b2f-431c-bef7-1b3c46fe628c\") " pod="openshift-marketplace/redhat-marketplace-h5qdd" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.235884 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxdch\" (UniqueName: \"kubernetes.io/projected/4fc769db-2b2f-431c-bef7-1b3c46fe628c-kube-api-access-fxdch\") pod \"redhat-marketplace-h5qdd\" (UID: \"4fc769db-2b2f-431c-bef7-1b3c46fe628c\") " pod="openshift-marketplace/redhat-marketplace-h5qdd" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.337440 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fc769db-2b2f-431c-bef7-1b3c46fe628c-catalog-content\") pod \"redhat-marketplace-h5qdd\" (UID: \"4fc769db-2b2f-431c-bef7-1b3c46fe628c\") " pod="openshift-marketplace/redhat-marketplace-h5qdd" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.337499 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fc769db-2b2f-431c-bef7-1b3c46fe628c-utilities\") pod \"redhat-marketplace-h5qdd\" (UID: \"4fc769db-2b2f-431c-bef7-1b3c46fe628c\") " pod="openshift-marketplace/redhat-marketplace-h5qdd" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.337593 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxdch\" (UniqueName: \"kubernetes.io/projected/4fc769db-2b2f-431c-bef7-1b3c46fe628c-kube-api-access-fxdch\") pod \"redhat-marketplace-h5qdd\" (UID: \"4fc769db-2b2f-431c-bef7-1b3c46fe628c\") " pod="openshift-marketplace/redhat-marketplace-h5qdd" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.338229 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fc769db-2b2f-431c-bef7-1b3c46fe628c-catalog-content\") pod \"redhat-marketplace-h5qdd\" (UID: \"4fc769db-2b2f-431c-bef7-1b3c46fe628c\") " pod="openshift-marketplace/redhat-marketplace-h5qdd" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.338491 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/4fc769db-2b2f-431c-bef7-1b3c46fe628c-utilities\") pod \"redhat-marketplace-h5qdd\" (UID: \"4fc769db-2b2f-431c-bef7-1b3c46fe628c\") " pod="openshift-marketplace/redhat-marketplace-h5qdd" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.361323 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxdch\" (UniqueName: \"kubernetes.io/projected/4fc769db-2b2f-431c-bef7-1b3c46fe628c-kube-api-access-fxdch\") pod \"redhat-marketplace-h5qdd\" (UID: \"4fc769db-2b2f-431c-bef7-1b3c46fe628c\") " pod="openshift-marketplace/redhat-marketplace-h5qdd" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.386829 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qzj9z"] Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.387716 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qzj9z" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.389721 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.398773 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qzj9z"] Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.438359 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnvtz\" (UniqueName: \"kubernetes.io/projected/ef7652d6-3ac3-4739-a190-5b071c4d0839-kube-api-access-fnvtz\") pod \"redhat-operators-qzj9z\" (UID: \"ef7652d6-3ac3-4739-a190-5b071c4d0839\") " pod="openshift-marketplace/redhat-operators-qzj9z" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.438465 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef7652d6-3ac3-4739-a190-5b071c4d0839-utilities\") pod \"redhat-operators-qzj9z\" (UID: \"ef7652d6-3ac3-4739-a190-5b071c4d0839\") " pod="openshift-marketplace/redhat-operators-qzj9z" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.438492 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef7652d6-3ac3-4739-a190-5b071c4d0839-catalog-content\") pod \"redhat-operators-qzj9z\" (UID: \"ef7652d6-3ac3-4739-a190-5b071c4d0839\") " pod="openshift-marketplace/redhat-operators-qzj9z" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.525921 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5qdd" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.539743 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef7652d6-3ac3-4739-a190-5b071c4d0839-utilities\") pod \"redhat-operators-qzj9z\" (UID: \"ef7652d6-3ac3-4739-a190-5b071c4d0839\") " pod="openshift-marketplace/redhat-operators-qzj9z" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.539808 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef7652d6-3ac3-4739-a190-5b071c4d0839-catalog-content\") pod \"redhat-operators-qzj9z\" (UID: \"ef7652d6-3ac3-4739-a190-5b071c4d0839\") " pod="openshift-marketplace/redhat-operators-qzj9z" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.539885 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnvtz\" (UniqueName: \"kubernetes.io/projected/ef7652d6-3ac3-4739-a190-5b071c4d0839-kube-api-access-fnvtz\") pod \"redhat-operators-qzj9z\" (UID: \"ef7652d6-3ac3-4739-a190-5b071c4d0839\") " pod="openshift-marketplace/redhat-operators-qzj9z" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.540224 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef7652d6-3ac3-4739-a190-5b071c4d0839-utilities\") pod \"redhat-operators-qzj9z\" (UID: \"ef7652d6-3ac3-4739-a190-5b071c4d0839\") " pod="openshift-marketplace/redhat-operators-qzj9z" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.541631 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef7652d6-3ac3-4739-a190-5b071c4d0839-catalog-content\") pod \"redhat-operators-qzj9z\" (UID: \"ef7652d6-3ac3-4739-a190-5b071c4d0839\") " pod="openshift-marketplace/redhat-operators-qzj9z" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.557223 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnvtz\" (UniqueName: \"kubernetes.io/projected/ef7652d6-3ac3-4739-a190-5b071c4d0839-kube-api-access-fnvtz\") pod \"redhat-operators-qzj9z\" (UID: \"ef7652d6-3ac3-4739-a190-5b071c4d0839\") " pod="openshift-marketplace/redhat-operators-qzj9z" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.686893 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5qdd"] Oct 13 13:11:21 crc kubenswrapper[4684]: W1013 13:11:21.694769 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4fc769db_2b2f_431c_bef7_1b3c46fe628c.slice/crio-ccd64ce06fb1ec4684ecb009db7175f2ae255a1b1eb861e73389e206629b712e WatchSource:0}: Error finding container ccd64ce06fb1ec4684ecb009db7175f2ae255a1b1eb861e73389e206629b712e: Status 404 returned error can't find the container with id ccd64ce06fb1ec4684ecb009db7175f2ae255a1b1eb861e73389e206629b712e Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.708767 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qzj9z" Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.791496 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5qdd" event={"ID":"4fc769db-2b2f-431c-bef7-1b3c46fe628c","Type":"ContainerStarted","Data":"ccd64ce06fb1ec4684ecb009db7175f2ae255a1b1eb861e73389e206629b712e"} Oct 13 13:11:21 crc kubenswrapper[4684]: I1013 13:11:21.889321 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qzj9z"] Oct 13 13:11:21 crc kubenswrapper[4684]: W1013 13:11:21.912243 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef7652d6_3ac3_4739_a190_5b071c4d0839.slice/crio-429189d1168400eabe355971231bc6d745bf4c3f8fd72172bc520ecfa2cd27da WatchSource:0}: Error finding container 429189d1168400eabe355971231bc6d745bf4c3f8fd72172bc520ecfa2cd27da: Status 404 returned error can't find the container with id 429189d1168400eabe355971231bc6d745bf4c3f8fd72172bc520ecfa2cd27da Oct 13 13:11:22 crc kubenswrapper[4684]: I1013 13:11:22.797557 4684 generic.go:334] "Generic (PLEG): container finished" podID="ef7652d6-3ac3-4739-a190-5b071c4d0839" containerID="231f83a9bee05970b02d47cfdef8a687a6b3a68f7b3fbb1c8879b15f28e9c3d2" exitCode=0 Oct 13 13:11:22 crc kubenswrapper[4684]: I1013 13:11:22.797660 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzj9z" event={"ID":"ef7652d6-3ac3-4739-a190-5b071c4d0839","Type":"ContainerDied","Data":"231f83a9bee05970b02d47cfdef8a687a6b3a68f7b3fbb1c8879b15f28e9c3d2"} Oct 13 13:11:22 crc kubenswrapper[4684]: I1013 13:11:22.797983 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzj9z" event={"ID":"ef7652d6-3ac3-4739-a190-5b071c4d0839","Type":"ContainerStarted","Data":"429189d1168400eabe355971231bc6d745bf4c3f8fd72172bc520ecfa2cd27da"} Oct 13 13:11:22 crc kubenswrapper[4684]: I1013 13:11:22.799593 4684 generic.go:334] "Generic (PLEG): container finished" podID="4fc769db-2b2f-431c-bef7-1b3c46fe628c" containerID="9000e42ae19105fce8a17aa553f827307b23536079b47ac4878eede9e3168901" exitCode=0 Oct 13 13:11:22 crc kubenswrapper[4684]: I1013 13:11:22.799658 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5qdd" event={"ID":"4fc769db-2b2f-431c-bef7-1b3c46fe628c","Type":"ContainerDied","Data":"9000e42ae19105fce8a17aa553f827307b23536079b47ac4878eede9e3168901"} Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.578711 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9mv7b"] Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.579608 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9mv7b" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.582902 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.587372 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9mv7b"] Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.663070 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22ed76a4-376f-43eb-89d1-995c25747c97-catalog-content\") pod \"community-operators-9mv7b\" (UID: \"22ed76a4-376f-43eb-89d1-995c25747c97\") " pod="openshift-marketplace/community-operators-9mv7b" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.663451 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22ed76a4-376f-43eb-89d1-995c25747c97-utilities\") pod \"community-operators-9mv7b\" (UID: \"22ed76a4-376f-43eb-89d1-995c25747c97\") " pod="openshift-marketplace/community-operators-9mv7b" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.663567 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nf5k4\" (UniqueName: \"kubernetes.io/projected/22ed76a4-376f-43eb-89d1-995c25747c97-kube-api-access-nf5k4\") pod \"community-operators-9mv7b\" (UID: \"22ed76a4-376f-43eb-89d1-995c25747c97\") " pod="openshift-marketplace/community-operators-9mv7b" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.764795 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22ed76a4-376f-43eb-89d1-995c25747c97-catalog-content\") pod \"community-operators-9mv7b\" (UID: \"22ed76a4-376f-43eb-89d1-995c25747c97\") " pod="openshift-marketplace/community-operators-9mv7b" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.764861 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22ed76a4-376f-43eb-89d1-995c25747c97-utilities\") pod \"community-operators-9mv7b\" (UID: \"22ed76a4-376f-43eb-89d1-995c25747c97\") " pod="openshift-marketplace/community-operators-9mv7b" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.764892 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nf5k4\" (UniqueName: \"kubernetes.io/projected/22ed76a4-376f-43eb-89d1-995c25747c97-kube-api-access-nf5k4\") pod \"community-operators-9mv7b\" (UID: \"22ed76a4-376f-43eb-89d1-995c25747c97\") " pod="openshift-marketplace/community-operators-9mv7b" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.765638 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22ed76a4-376f-43eb-89d1-995c25747c97-catalog-content\") pod \"community-operators-9mv7b\" (UID: \"22ed76a4-376f-43eb-89d1-995c25747c97\") " pod="openshift-marketplace/community-operators-9mv7b" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.765678 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22ed76a4-376f-43eb-89d1-995c25747c97-utilities\") pod \"community-operators-9mv7b\" (UID: 
\"22ed76a4-376f-43eb-89d1-995c25747c97\") " pod="openshift-marketplace/community-operators-9mv7b" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.775809 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2dqll"] Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.779203 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2dqll" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.781318 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.788001 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2dqll"] Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.790882 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nf5k4\" (UniqueName: \"kubernetes.io/projected/22ed76a4-376f-43eb-89d1-995c25747c97-kube-api-access-nf5k4\") pod \"community-operators-9mv7b\" (UID: \"22ed76a4-376f-43eb-89d1-995c25747c97\") " pod="openshift-marketplace/community-operators-9mv7b" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.816424 4684 generic.go:334] "Generic (PLEG): container finished" podID="4fc769db-2b2f-431c-bef7-1b3c46fe628c" containerID="d804b2088ee279c00c5b51e7dab137cf0963499b9760cd76796c6e421d122e9a" exitCode=0 Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.816475 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5qdd" event={"ID":"4fc769db-2b2f-431c-bef7-1b3c46fe628c","Type":"ContainerDied","Data":"d804b2088ee279c00c5b51e7dab137cf0963499b9760cd76796c6e421d122e9a"} Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.866528 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5497e7c-0895-4f48-bc23-89ed3b1b5fb5-catalog-content\") pod \"certified-operators-2dqll\" (UID: \"e5497e7c-0895-4f48-bc23-89ed3b1b5fb5\") " pod="openshift-marketplace/certified-operators-2dqll" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.866627 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwq2z\" (UniqueName: \"kubernetes.io/projected/e5497e7c-0895-4f48-bc23-89ed3b1b5fb5-kube-api-access-rwq2z\") pod \"certified-operators-2dqll\" (UID: \"e5497e7c-0895-4f48-bc23-89ed3b1b5fb5\") " pod="openshift-marketplace/certified-operators-2dqll" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.866701 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5497e7c-0895-4f48-bc23-89ed3b1b5fb5-utilities\") pod \"certified-operators-2dqll\" (UID: \"e5497e7c-0895-4f48-bc23-89ed3b1b5fb5\") " pod="openshift-marketplace/certified-operators-2dqll" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.916444 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9mv7b" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.967627 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5497e7c-0895-4f48-bc23-89ed3b1b5fb5-utilities\") pod \"certified-operators-2dqll\" (UID: \"e5497e7c-0895-4f48-bc23-89ed3b1b5fb5\") " pod="openshift-marketplace/certified-operators-2dqll" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.967981 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5497e7c-0895-4f48-bc23-89ed3b1b5fb5-utilities\") pod \"certified-operators-2dqll\" (UID: \"e5497e7c-0895-4f48-bc23-89ed3b1b5fb5\") " pod="openshift-marketplace/certified-operators-2dqll" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.968037 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5497e7c-0895-4f48-bc23-89ed3b1b5fb5-catalog-content\") pod \"certified-operators-2dqll\" (UID: \"e5497e7c-0895-4f48-bc23-89ed3b1b5fb5\") " pod="openshift-marketplace/certified-operators-2dqll" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.968083 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwq2z\" (UniqueName: \"kubernetes.io/projected/e5497e7c-0895-4f48-bc23-89ed3b1b5fb5-kube-api-access-rwq2z\") pod \"certified-operators-2dqll\" (UID: \"e5497e7c-0895-4f48-bc23-89ed3b1b5fb5\") " pod="openshift-marketplace/certified-operators-2dqll" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.968842 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5497e7c-0895-4f48-bc23-89ed3b1b5fb5-catalog-content\") pod \"certified-operators-2dqll\" (UID: \"e5497e7c-0895-4f48-bc23-89ed3b1b5fb5\") " pod="openshift-marketplace/certified-operators-2dqll" Oct 13 13:11:23 crc kubenswrapper[4684]: I1013 13:11:23.989877 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwq2z\" (UniqueName: \"kubernetes.io/projected/e5497e7c-0895-4f48-bc23-89ed3b1b5fb5-kube-api-access-rwq2z\") pod \"certified-operators-2dqll\" (UID: \"e5497e7c-0895-4f48-bc23-89ed3b1b5fb5\") " pod="openshift-marketplace/certified-operators-2dqll" Oct 13 13:11:24 crc kubenswrapper[4684]: I1013 13:11:24.123174 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9mv7b"] Oct 13 13:11:24 crc kubenswrapper[4684]: I1013 13:11:24.123690 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2dqll" Oct 13 13:11:24 crc kubenswrapper[4684]: W1013 13:11:24.134594 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod22ed76a4_376f_43eb_89d1_995c25747c97.slice/crio-a04d04f26390e47115b20381a85591c191c47558c65cf6000999a83cd1336074 WatchSource:0}: Error finding container a04d04f26390e47115b20381a85591c191c47558c65cf6000999a83cd1336074: Status 404 returned error can't find the container with id a04d04f26390e47115b20381a85591c191c47558c65cf6000999a83cd1336074 Oct 13 13:11:24 crc kubenswrapper[4684]: I1013 13:11:24.306472 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2dqll"] Oct 13 13:11:24 crc kubenswrapper[4684]: W1013 13:11:24.340785 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode5497e7c_0895_4f48_bc23_89ed3b1b5fb5.slice/crio-9648a1e7483af5fc04ce49f0a4581a447e4407c49c0a43556cbaac18c97e540a WatchSource:0}: Error finding container 9648a1e7483af5fc04ce49f0a4581a447e4407c49c0a43556cbaac18c97e540a: Status 404 returned error can't find the container with id 9648a1e7483af5fc04ce49f0a4581a447e4407c49c0a43556cbaac18c97e540a Oct 13 13:11:24 crc kubenswrapper[4684]: I1013 13:11:24.823674 4684 generic.go:334] "Generic (PLEG): container finished" podID="22ed76a4-376f-43eb-89d1-995c25747c97" containerID="2981cce05fc85724a8731fa460e9c546a29edcbe76846a653a8c8e38badd5797" exitCode=0 Oct 13 13:11:24 crc kubenswrapper[4684]: I1013 13:11:24.823966 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mv7b" event={"ID":"22ed76a4-376f-43eb-89d1-995c25747c97","Type":"ContainerDied","Data":"2981cce05fc85724a8731fa460e9c546a29edcbe76846a653a8c8e38badd5797"} Oct 13 13:11:24 crc kubenswrapper[4684]: I1013 13:11:24.824404 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mv7b" event={"ID":"22ed76a4-376f-43eb-89d1-995c25747c97","Type":"ContainerStarted","Data":"a04d04f26390e47115b20381a85591c191c47558c65cf6000999a83cd1336074"} Oct 13 13:11:24 crc kubenswrapper[4684]: I1013 13:11:24.832105 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5qdd" event={"ID":"4fc769db-2b2f-431c-bef7-1b3c46fe628c","Type":"ContainerStarted","Data":"ec9195c23303b4a03a40d8a10d9041aca3c6be75da50b82edf6217beadb7a14c"} Oct 13 13:11:24 crc kubenswrapper[4684]: I1013 13:11:24.835236 4684 generic.go:334] "Generic (PLEG): container finished" podID="ef7652d6-3ac3-4739-a190-5b071c4d0839" containerID="9a3c6d61bccd51f4e2e04d8e32b728ced2ae43da9effab0ef7793584d26b746d" exitCode=0 Oct 13 13:11:24 crc kubenswrapper[4684]: I1013 13:11:24.835308 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzj9z" event={"ID":"ef7652d6-3ac3-4739-a190-5b071c4d0839","Type":"ContainerDied","Data":"9a3c6d61bccd51f4e2e04d8e32b728ced2ae43da9effab0ef7793584d26b746d"} Oct 13 13:11:24 crc kubenswrapper[4684]: I1013 13:11:24.836895 4684 generic.go:334] "Generic (PLEG): container finished" podID="e5497e7c-0895-4f48-bc23-89ed3b1b5fb5" containerID="5f6398c97f14135a7c60e51d9588b734b0ba5f62b892cc95a6bb8d67eb2a2309" exitCode=0 Oct 13 13:11:24 crc kubenswrapper[4684]: I1013 13:11:24.836956 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-2dqll" event={"ID":"e5497e7c-0895-4f48-bc23-89ed3b1b5fb5","Type":"ContainerDied","Data":"5f6398c97f14135a7c60e51d9588b734b0ba5f62b892cc95a6bb8d67eb2a2309"} Oct 13 13:11:24 crc kubenswrapper[4684]: I1013 13:11:24.836984 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dqll" event={"ID":"e5497e7c-0895-4f48-bc23-89ed3b1b5fb5","Type":"ContainerStarted","Data":"9648a1e7483af5fc04ce49f0a4581a447e4407c49c0a43556cbaac18c97e540a"} Oct 13 13:11:24 crc kubenswrapper[4684]: I1013 13:11:24.898344 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-h5qdd" podStartSLOduration=2.424117624 podStartE2EDuration="3.898325472s" podCreationTimestamp="2025-10-13 13:11:21 +0000 UTC" firstStartedPulling="2025-10-13 13:11:22.800502211 +0000 UTC m=+237.367886281" lastFinishedPulling="2025-10-13 13:11:24.274710059 +0000 UTC m=+238.842094129" observedRunningTime="2025-10-13 13:11:24.892950735 +0000 UTC m=+239.460334795" watchObservedRunningTime="2025-10-13 13:11:24.898325472 +0000 UTC m=+239.465709542" Oct 13 13:11:26 crc kubenswrapper[4684]: I1013 13:11:26.850025 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzj9z" event={"ID":"ef7652d6-3ac3-4739-a190-5b071c4d0839","Type":"ContainerStarted","Data":"804add3e671aaa24a09fc1307c014b93be8b46061f45b4fd609e003462baf247"} Oct 13 13:11:26 crc kubenswrapper[4684]: I1013 13:11:26.851610 4684 generic.go:334] "Generic (PLEG): container finished" podID="22ed76a4-376f-43eb-89d1-995c25747c97" containerID="4c02170d6cf1350d341f3139810dc15d8d4f3ce6a048d33dc6506bed33f1b958" exitCode=0 Oct 13 13:11:26 crc kubenswrapper[4684]: I1013 13:11:26.851754 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mv7b" event={"ID":"22ed76a4-376f-43eb-89d1-995c25747c97","Type":"ContainerDied","Data":"4c02170d6cf1350d341f3139810dc15d8d4f3ce6a048d33dc6506bed33f1b958"} Oct 13 13:11:26 crc kubenswrapper[4684]: I1013 13:11:26.856096 4684 generic.go:334] "Generic (PLEG): container finished" podID="e5497e7c-0895-4f48-bc23-89ed3b1b5fb5" containerID="ea6250003b253d15d7cc85a699b5782e27c8d6ddc21de157d6a27a657c7ff657" exitCode=0 Oct 13 13:11:26 crc kubenswrapper[4684]: I1013 13:11:26.856143 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dqll" event={"ID":"e5497e7c-0895-4f48-bc23-89ed3b1b5fb5","Type":"ContainerDied","Data":"ea6250003b253d15d7cc85a699b5782e27c8d6ddc21de157d6a27a657c7ff657"} Oct 13 13:11:26 crc kubenswrapper[4684]: I1013 13:11:26.868447 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qzj9z" podStartSLOduration=3.325544311 podStartE2EDuration="5.868432911s" podCreationTimestamp="2025-10-13 13:11:21 +0000 UTC" firstStartedPulling="2025-10-13 13:11:22.799862861 +0000 UTC m=+237.367246921" lastFinishedPulling="2025-10-13 13:11:25.342751441 +0000 UTC m=+239.910135521" observedRunningTime="2025-10-13 13:11:26.867111579 +0000 UTC m=+241.434495649" watchObservedRunningTime="2025-10-13 13:11:26.868432911 +0000 UTC m=+241.435816981" Oct 13 13:11:27 crc kubenswrapper[4684]: I1013 13:11:27.863233 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dqll" 
event={"ID":"e5497e7c-0895-4f48-bc23-89ed3b1b5fb5","Type":"ContainerStarted","Data":"ba1a27bcc7d4c2acf9c71255cf13da7a6b582866fd12d3bfbc5a910709fdaf6e"} Oct 13 13:11:27 crc kubenswrapper[4684]: I1013 13:11:27.866971 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mv7b" event={"ID":"22ed76a4-376f-43eb-89d1-995c25747c97","Type":"ContainerStarted","Data":"199c0d9eafd7c52aefb6233ae2d8b5b9a4927283432d65f1c1d0c84e55b6d136"} Oct 13 13:11:27 crc kubenswrapper[4684]: I1013 13:11:27.910491 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2dqll" podStartSLOduration=2.430107428 podStartE2EDuration="4.910468182s" podCreationTimestamp="2025-10-13 13:11:23 +0000 UTC" firstStartedPulling="2025-10-13 13:11:24.839139951 +0000 UTC m=+239.406524021" lastFinishedPulling="2025-10-13 13:11:27.319500705 +0000 UTC m=+241.886884775" observedRunningTime="2025-10-13 13:11:27.907115548 +0000 UTC m=+242.474499628" watchObservedRunningTime="2025-10-13 13:11:27.910468182 +0000 UTC m=+242.477852252" Oct 13 13:11:27 crc kubenswrapper[4684]: I1013 13:11:27.938995 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9mv7b" podStartSLOduration=2.353767222 podStartE2EDuration="4.938955388s" podCreationTimestamp="2025-10-13 13:11:23 +0000 UTC" firstStartedPulling="2025-10-13 13:11:24.826137406 +0000 UTC m=+239.393521476" lastFinishedPulling="2025-10-13 13:11:27.411325582 +0000 UTC m=+241.978709642" observedRunningTime="2025-10-13 13:11:27.934859592 +0000 UTC m=+242.502243672" watchObservedRunningTime="2025-10-13 13:11:27.938955388 +0000 UTC m=+242.506339458" Oct 13 13:11:31 crc kubenswrapper[4684]: I1013 13:11:31.526194 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-h5qdd" Oct 13 13:11:31 crc kubenswrapper[4684]: I1013 13:11:31.527511 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-h5qdd" Oct 13 13:11:31 crc kubenswrapper[4684]: I1013 13:11:31.568031 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-h5qdd" Oct 13 13:11:31 crc kubenswrapper[4684]: I1013 13:11:31.710118 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qzj9z" Oct 13 13:11:31 crc kubenswrapper[4684]: I1013 13:11:31.710180 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qzj9z" Oct 13 13:11:31 crc kubenswrapper[4684]: I1013 13:11:31.752936 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qzj9z" Oct 13 13:11:31 crc kubenswrapper[4684]: I1013 13:11:31.925277 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-h5qdd" Oct 13 13:11:31 crc kubenswrapper[4684]: I1013 13:11:31.945346 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qzj9z" Oct 13 13:11:33 crc kubenswrapper[4684]: I1013 13:11:33.916936 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9mv7b" Oct 13 13:11:33 crc kubenswrapper[4684]: I1013 13:11:33.917002 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-9mv7b" Oct 13 13:11:33 crc kubenswrapper[4684]: I1013 13:11:33.956557 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9mv7b" Oct 13 13:11:34 crc kubenswrapper[4684]: I1013 13:11:34.124568 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2dqll" Oct 13 13:11:34 crc kubenswrapper[4684]: I1013 13:11:34.124655 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2dqll" Oct 13 13:11:34 crc kubenswrapper[4684]: I1013 13:11:34.167320 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2dqll" Oct 13 13:11:34 crc kubenswrapper[4684]: I1013 13:11:34.945452 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9mv7b" Oct 13 13:11:34 crc kubenswrapper[4684]: I1013 13:11:34.953185 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2dqll" Oct 13 13:13:00 crc kubenswrapper[4684]: I1013 13:13:00.559768 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:13:00 crc kubenswrapper[4684]: I1013 13:13:00.560395 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:13:30 crc kubenswrapper[4684]: I1013 13:13:30.559687 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:13:30 crc kubenswrapper[4684]: I1013 13:13:30.560403 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:14:00 crc kubenswrapper[4684]: I1013 13:14:00.559788 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:14:00 crc kubenswrapper[4684]: I1013 13:14:00.560564 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:14:00 crc kubenswrapper[4684]: I1013 13:14:00.560775 4684 kubelet.go:2542] "SyncLoop 
(probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:14:00 crc kubenswrapper[4684]: I1013 13:14:00.562399 4684 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b6dd768883ba78cafb92197d19e9353d53ffabce7a93163ce51b64d353a36851"} pod="openshift-machine-config-operator/machine-config-daemon-wns5s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 13:14:00 crc kubenswrapper[4684]: I1013 13:14:00.562555 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" containerID="cri-o://b6dd768883ba78cafb92197d19e9353d53ffabce7a93163ce51b64d353a36851" gracePeriod=600 Oct 13 13:14:00 crc kubenswrapper[4684]: E1013 13:14:00.587602 4684 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode54ad64a_6df7_4082_afde_d56463121b3f.slice/crio-b6dd768883ba78cafb92197d19e9353d53ffabce7a93163ce51b64d353a36851.scope\": RecentStats: unable to find data in memory cache]" Oct 13 13:14:00 crc kubenswrapper[4684]: I1013 13:14:00.811552 4684 generic.go:334] "Generic (PLEG): container finished" podID="e54ad64a-6df7-4082-afde-d56463121b3f" containerID="b6dd768883ba78cafb92197d19e9353d53ffabce7a93163ce51b64d353a36851" exitCode=0 Oct 13 13:14:00 crc kubenswrapper[4684]: I1013 13:14:00.811606 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerDied","Data":"b6dd768883ba78cafb92197d19e9353d53ffabce7a93163ce51b64d353a36851"} Oct 13 13:14:00 crc kubenswrapper[4684]: I1013 13:14:00.811666 4684 scope.go:117] "RemoveContainer" containerID="c378d71a221635f2bfb512ae8b16fec9df404f89baa3b2ee057c8499dae130a5" Oct 13 13:14:01 crc kubenswrapper[4684]: I1013 13:14:01.819065 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"5d7bac5989bd561f1d776c849f654aeb770962f36d566b0607016c06463a1f0b"} Oct 13 13:15:00 crc kubenswrapper[4684]: I1013 13:15:00.140084 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"] Oct 13 13:15:00 crc kubenswrapper[4684]: I1013 13:15:00.141400 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"
Oct 13 13:15:00 crc kubenswrapper[4684]: I1013 13:15:00.143753 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 13 13:15:00 crc kubenswrapper[4684]: I1013 13:15:00.144252 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 13 13:15:00 crc kubenswrapper[4684]: I1013 13:15:00.147954 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"]
Oct 13 13:15:00 crc kubenswrapper[4684]: I1013 13:15:00.290743 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-secret-volume\") pod \"collect-profiles-29339355-jtjhk\" (UID: \"14b78ff6-21ce-4b5b-a6bf-915e28c2956b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"
Oct 13 13:15:00 crc kubenswrapper[4684]: I1013 13:15:00.290794 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-config-volume\") pod \"collect-profiles-29339355-jtjhk\" (UID: \"14b78ff6-21ce-4b5b-a6bf-915e28c2956b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"
Oct 13 13:15:00 crc kubenswrapper[4684]: I1013 13:15:00.290819 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8f68\" (UniqueName: \"kubernetes.io/projected/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-kube-api-access-g8f68\") pod \"collect-profiles-29339355-jtjhk\" (UID: \"14b78ff6-21ce-4b5b-a6bf-915e28c2956b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"
Oct 13 13:15:00 crc kubenswrapper[4684]: I1013 13:15:00.391788 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-secret-volume\") pod \"collect-profiles-29339355-jtjhk\" (UID: \"14b78ff6-21ce-4b5b-a6bf-915e28c2956b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"
Oct 13 13:15:00 crc kubenswrapper[4684]: I1013 13:15:00.391830 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-config-volume\") pod \"collect-profiles-29339355-jtjhk\" (UID: \"14b78ff6-21ce-4b5b-a6bf-915e28c2956b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"
Oct 13 13:15:00 crc kubenswrapper[4684]: I1013 13:15:00.391850 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8f68\" (UniqueName: \"kubernetes.io/projected/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-kube-api-access-g8f68\") pod \"collect-profiles-29339355-jtjhk\" (UID: \"14b78ff6-21ce-4b5b-a6bf-915e28c2956b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"
Oct 13 13:15:00 crc kubenswrapper[4684]: I1013 13:15:00.392750 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-config-volume\") pod \"collect-profiles-29339355-jtjhk\" (UID: \"14b78ff6-21ce-4b5b-a6bf-915e28c2956b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"
Oct 13 13:15:00 crc kubenswrapper[4684]: I1013 13:15:00.398103 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-secret-volume\") pod \"collect-profiles-29339355-jtjhk\" (UID: \"14b78ff6-21ce-4b5b-a6bf-915e28c2956b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"
Oct 13 13:15:00 crc kubenswrapper[4684]: I1013 13:15:00.420938 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8f68\" (UniqueName: \"kubernetes.io/projected/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-kube-api-access-g8f68\") pod \"collect-profiles-29339355-jtjhk\" (UID: \"14b78ff6-21ce-4b5b-a6bf-915e28c2956b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"
Oct 13 13:15:00 crc kubenswrapper[4684]: I1013 13:15:00.498522 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"
Oct 13 13:15:00 crc kubenswrapper[4684]: I1013 13:15:00.713575 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"]
Oct 13 13:15:01 crc kubenswrapper[4684]: I1013 13:15:01.201231 4684 generic.go:334] "Generic (PLEG): container finished" podID="14b78ff6-21ce-4b5b-a6bf-915e28c2956b" containerID="d98b9e339e90cd06a6a2cb70490f05968117d1e8d9e6ecc8aaf857de851d294e" exitCode=0
Oct 13 13:15:01 crc kubenswrapper[4684]: I1013 13:15:01.201281 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk" event={"ID":"14b78ff6-21ce-4b5b-a6bf-915e28c2956b","Type":"ContainerDied","Data":"d98b9e339e90cd06a6a2cb70490f05968117d1e8d9e6ecc8aaf857de851d294e"}
Oct 13 13:15:01 crc kubenswrapper[4684]: I1013 13:15:01.201313 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk" event={"ID":"14b78ff6-21ce-4b5b-a6bf-915e28c2956b","Type":"ContainerStarted","Data":"7f8c3e4586daaa44887e6d8859089cf910f1661a55ece4d23fef7a889e522bda"}
Oct 13 13:15:02 crc kubenswrapper[4684]: I1013 13:15:02.505725 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"
Oct 13 13:15:02 crc kubenswrapper[4684]: I1013 13:15:02.624888 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-config-volume\") pod \"14b78ff6-21ce-4b5b-a6bf-915e28c2956b\" (UID: \"14b78ff6-21ce-4b5b-a6bf-915e28c2956b\") "
Oct 13 13:15:02 crc kubenswrapper[4684]: I1013 13:15:02.624958 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-secret-volume\") pod \"14b78ff6-21ce-4b5b-a6bf-915e28c2956b\" (UID: \"14b78ff6-21ce-4b5b-a6bf-915e28c2956b\") "
Oct 13 13:15:02 crc kubenswrapper[4684]: I1013 13:15:02.625003 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8f68\" (UniqueName: \"kubernetes.io/projected/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-kube-api-access-g8f68\") pod \"14b78ff6-21ce-4b5b-a6bf-915e28c2956b\" (UID: \"14b78ff6-21ce-4b5b-a6bf-915e28c2956b\") "
Oct 13 13:15:02 crc kubenswrapper[4684]: I1013 13:15:02.625891 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-config-volume" (OuterVolumeSpecName: "config-volume") pod "14b78ff6-21ce-4b5b-a6bf-915e28c2956b" (UID: "14b78ff6-21ce-4b5b-a6bf-915e28c2956b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:15:02 crc kubenswrapper[4684]: I1013 13:15:02.631023 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "14b78ff6-21ce-4b5b-a6bf-915e28c2956b" (UID: "14b78ff6-21ce-4b5b-a6bf-915e28c2956b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:15:02 crc kubenswrapper[4684]: I1013 13:15:02.631423 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-kube-api-access-g8f68" (OuterVolumeSpecName: "kube-api-access-g8f68") pod "14b78ff6-21ce-4b5b-a6bf-915e28c2956b" (UID: "14b78ff6-21ce-4b5b-a6bf-915e28c2956b"). InnerVolumeSpecName "kube-api-access-g8f68". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:15:02 crc kubenswrapper[4684]: I1013 13:15:02.726695 4684 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-config-volume\") on node \"crc\" DevicePath \"\""
Oct 13 13:15:02 crc kubenswrapper[4684]: I1013 13:15:02.726752 4684 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 13 13:15:02 crc kubenswrapper[4684]: I1013 13:15:02.726771 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8f68\" (UniqueName: \"kubernetes.io/projected/14b78ff6-21ce-4b5b-a6bf-915e28c2956b-kube-api-access-g8f68\") on node \"crc\" DevicePath \"\""
Oct 13 13:15:03 crc kubenswrapper[4684]: I1013 13:15:03.219216 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk" event={"ID":"14b78ff6-21ce-4b5b-a6bf-915e28c2956b","Type":"ContainerDied","Data":"7f8c3e4586daaa44887e6d8859089cf910f1661a55ece4d23fef7a889e522bda"}
Oct 13 13:15:03 crc kubenswrapper[4684]: I1013 13:15:03.219264 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f8c3e4586daaa44887e6d8859089cf910f1661a55ece4d23fef7a889e522bda"
Oct 13 13:15:03 crc kubenswrapper[4684]: I1013 13:15:03.219360 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.056383 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-wf99p"]
Oct 13 13:15:14 crc kubenswrapper[4684]: E1013 13:15:14.057349 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14b78ff6-21ce-4b5b-a6bf-915e28c2956b" containerName="collect-profiles"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.057371 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="14b78ff6-21ce-4b5b-a6bf-915e28c2956b" containerName="collect-profiles"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.057539 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="14b78ff6-21ce-4b5b-a6bf-915e28c2956b" containerName="collect-profiles"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.058221 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.081784 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-wf99p"]
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.177461 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.177520 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-trusted-ca\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.177569 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-bound-sa-token\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.177610 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-registry-tls\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.177638 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pnkk\" (UniqueName: \"kubernetes.io/projected/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-kube-api-access-8pnkk\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.177678 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-installation-pull-secrets\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.177868 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-ca-trust-extracted\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.177949 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-registry-certificates\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.198687 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.279376 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-bound-sa-token\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.279440 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-registry-tls\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.279467 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pnkk\" (UniqueName: \"kubernetes.io/projected/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-kube-api-access-8pnkk\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.279494 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-installation-pull-secrets\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.279532 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-ca-trust-extracted\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.279552 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-registry-certificates\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.279573 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-trusted-ca\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.280740 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-trusted-ca\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.280985 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-ca-trust-extracted\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.281588 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-registry-certificates\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.293852 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-installation-pull-secrets\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.295313 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-registry-tls\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.299734 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-bound-sa-token\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.301241 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pnkk\" (UniqueName: \"kubernetes.io/projected/d9128d56-3aac-4e6d-a7f2-3977ad5abc59-kube-api-access-8pnkk\") pod \"image-registry-66df7c8f76-wf99p\" (UID: \"d9128d56-3aac-4e6d-a7f2-3977ad5abc59\") " pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.376735 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:14 crc kubenswrapper[4684]: I1013 13:15:14.557429 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-wf99p"]
Oct 13 13:15:15 crc kubenswrapper[4684]: I1013 13:15:15.286338 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-wf99p" event={"ID":"d9128d56-3aac-4e6d-a7f2-3977ad5abc59","Type":"ContainerStarted","Data":"893c689f093b477636f49eeb8de642df734c63ed6d4fb822b96be5b0fd244995"}
Oct 13 13:15:15 crc kubenswrapper[4684]: I1013 13:15:15.286632 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:15 crc kubenswrapper[4684]: I1013 13:15:15.286647 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-wf99p" event={"ID":"d9128d56-3aac-4e6d-a7f2-3977ad5abc59","Type":"ContainerStarted","Data":"a3522e1acb3a6565119c5f66fb35eb9123dd8e437eacb09059af143975283417"}
Oct 13 13:15:15 crc kubenswrapper[4684]: I1013 13:15:15.318789 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-wf99p" podStartSLOduration=1.318768302 podStartE2EDuration="1.318768302s" podCreationTimestamp="2025-10-13 13:15:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:15:15.314775816 +0000 UTC m=+469.882159926" watchObservedRunningTime="2025-10-13 13:15:15.318768302 +0000 UTC m=+469.886152382"
Oct 13 13:15:34 crc kubenswrapper[4684]: I1013 13:15:34.383249 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-wf99p"
Oct 13 13:15:34 crc kubenswrapper[4684]: I1013 13:15:34.441406 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qm5mj"]
Oct 13 13:15:59 crc kubenswrapper[4684]: I1013 13:15:59.486983 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" podUID="e7af13a9-fc75-4bdb-931f-b68bb3813c09" containerName="registry" containerID="cri-o://a90ddd1423f2b09515587b9e0892bd863d55e5aa23598b1e0644fe74b1bc0924" gracePeriod=30
Oct 13 13:15:59 crc kubenswrapper[4684]: I1013 13:15:59.863283 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj"
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.036498 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-bound-sa-token\") pod \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") "
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.036591 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e7af13a9-fc75-4bdb-931f-b68bb3813c09-installation-pull-secrets\") pod \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") "
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.036650 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e7af13a9-fc75-4bdb-931f-b68bb3813c09-ca-trust-extracted\") pod \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") "
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.036690 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e7af13a9-fc75-4bdb-931f-b68bb3813c09-registry-certificates\") pod \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") "
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.036739 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7af13a9-fc75-4bdb-931f-b68bb3813c09-trusted-ca\") pod \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") "
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.036759 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-registry-tls\") pod \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") "
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.036966 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") "
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.036990 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvjg2\" (UniqueName: \"kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-kube-api-access-nvjg2\") pod \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\" (UID: \"e7af13a9-fc75-4bdb-931f-b68bb3813c09\") "
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.038245 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7af13a9-fc75-4bdb-931f-b68bb3813c09-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "e7af13a9-fc75-4bdb-931f-b68bb3813c09" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.038708 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7af13a9-fc75-4bdb-931f-b68bb3813c09-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "e7af13a9-fc75-4bdb-931f-b68bb3813c09" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.044651 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "e7af13a9-fc75-4bdb-931f-b68bb3813c09" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.044999 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-kube-api-access-nvjg2" (OuterVolumeSpecName: "kube-api-access-nvjg2") pod "e7af13a9-fc75-4bdb-931f-b68bb3813c09" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09"). InnerVolumeSpecName "kube-api-access-nvjg2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.044725 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7af13a9-fc75-4bdb-931f-b68bb3813c09-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "e7af13a9-fc75-4bdb-931f-b68bb3813c09" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.045391 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "e7af13a9-fc75-4bdb-931f-b68bb3813c09" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.054854 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "e7af13a9-fc75-4bdb-931f-b68bb3813c09" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.066893 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7af13a9-fc75-4bdb-931f-b68bb3813c09-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "e7af13a9-fc75-4bdb-931f-b68bb3813c09" (UID: "e7af13a9-fc75-4bdb-931f-b68bb3813c09"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.138204 4684 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-bound-sa-token\") on node \"crc\" DevicePath \"\""
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.138663 4684 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e7af13a9-fc75-4bdb-931f-b68bb3813c09-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.138752 4684 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e7af13a9-fc75-4bdb-931f-b68bb3813c09-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.138809 4684 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e7af13a9-fc75-4bdb-931f-b68bb3813c09-registry-certificates\") on node \"crc\" DevicePath \"\""
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.138859 4684 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7af13a9-fc75-4bdb-931f-b68bb3813c09-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.138922 4684 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-registry-tls\") on node \"crc\" DevicePath \"\""
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.138976 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvjg2\" (UniqueName: \"kubernetes.io/projected/e7af13a9-fc75-4bdb-931f-b68bb3813c09-kube-api-access-nvjg2\") on node \"crc\" DevicePath \"\""
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.560055 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.560124 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.566553 4684 generic.go:334] "Generic (PLEG): container finished" podID="e7af13a9-fc75-4bdb-931f-b68bb3813c09" containerID="a90ddd1423f2b09515587b9e0892bd863d55e5aa23598b1e0644fe74b1bc0924" exitCode=0
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.566596 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" event={"ID":"e7af13a9-fc75-4bdb-931f-b68bb3813c09","Type":"ContainerDied","Data":"a90ddd1423f2b09515587b9e0892bd863d55e5aa23598b1e0644fe74b1bc0924"}
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.566623 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj" event={"ID":"e7af13a9-fc75-4bdb-931f-b68bb3813c09","Type":"ContainerDied","Data":"a8bda9a57e13b8a66b6b1339556ec090a9d4b80ba80b24e6fd3149290b47c76b"}
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.566641 4684 scope.go:117] "RemoveContainer" containerID="a90ddd1423f2b09515587b9e0892bd863d55e5aa23598b1e0644fe74b1bc0924"
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.566757 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-qm5mj"
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.597544 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qm5mj"]
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.608086 4684 scope.go:117] "RemoveContainer" containerID="a90ddd1423f2b09515587b9e0892bd863d55e5aa23598b1e0644fe74b1bc0924"
Oct 13 13:16:00 crc kubenswrapper[4684]: E1013 13:16:00.608852 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a90ddd1423f2b09515587b9e0892bd863d55e5aa23598b1e0644fe74b1bc0924\": container with ID starting with a90ddd1423f2b09515587b9e0892bd863d55e5aa23598b1e0644fe74b1bc0924 not found: ID does not exist" containerID="a90ddd1423f2b09515587b9e0892bd863d55e5aa23598b1e0644fe74b1bc0924"
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.608890 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a90ddd1423f2b09515587b9e0892bd863d55e5aa23598b1e0644fe74b1bc0924"} err="failed to get container status \"a90ddd1423f2b09515587b9e0892bd863d55e5aa23598b1e0644fe74b1bc0924\": rpc error: code = NotFound desc = could not find container \"a90ddd1423f2b09515587b9e0892bd863d55e5aa23598b1e0644fe74b1bc0924\": container with ID starting with a90ddd1423f2b09515587b9e0892bd863d55e5aa23598b1e0644fe74b1bc0924 not found: ID does not exist"
Oct 13 13:16:00 crc kubenswrapper[4684]: I1013 13:16:00.609955 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-qm5mj"]
Oct 13 13:16:02 crc kubenswrapper[4684]: I1013 13:16:02.359255 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7af13a9-fc75-4bdb-931f-b68bb3813c09" path="/var/lib/kubelet/pods/e7af13a9-fc75-4bdb-931f-b68bb3813c09/volumes"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.605396 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-58lpp"]
Oct 13 13:16:24 crc kubenswrapper[4684]: E1013 13:16:24.606219 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7af13a9-fc75-4bdb-931f-b68bb3813c09" containerName="registry"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.606235 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7af13a9-fc75-4bdb-931f-b68bb3813c09" containerName="registry"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.606331 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7af13a9-fc75-4bdb-931f-b68bb3813c09" containerName="registry"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.606831 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-58lpp"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.609158 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.609579 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.609735 4684 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-lvk7x"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.615605 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-58lpp"]
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.630624 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-4xcpj"]
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.631475 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-4xcpj"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.633450 4684 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-z4skp"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.644474 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-ldckd"]
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.645162 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-ldckd"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.649117 4684 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-kfjhr"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.662453 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-4xcpj"]
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.677821 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-ldckd"]
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.760957 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8thxq\" (UniqueName: \"kubernetes.io/projected/f65d7f2f-54ed-4d01-b143-82edecc32788-kube-api-access-8thxq\") pod \"cert-manager-5b446d88c5-4xcpj\" (UID: \"f65d7f2f-54ed-4d01-b143-82edecc32788\") " pod="cert-manager/cert-manager-5b446d88c5-4xcpj"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.761039 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxcmp\" (UniqueName: \"kubernetes.io/projected/d50e48e4-4e31-4a3b-bf2f-69dcbfdf8ef4-kube-api-access-lxcmp\") pod \"cert-manager-cainjector-7f985d654d-58lpp\" (UID: \"d50e48e4-4e31-4a3b-bf2f-69dcbfdf8ef4\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-58lpp"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.761095 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5wkw\" (UniqueName: \"kubernetes.io/projected/5689e64c-b30a-4009-ad81-2ace50352b94-kube-api-access-z5wkw\") pod \"cert-manager-webhook-5655c58dd6-ldckd\" (UID: \"5689e64c-b30a-4009-ad81-2ace50352b94\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-ldckd"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.862671 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8thxq\" (UniqueName: \"kubernetes.io/projected/f65d7f2f-54ed-4d01-b143-82edecc32788-kube-api-access-8thxq\") pod \"cert-manager-5b446d88c5-4xcpj\" (UID: \"f65d7f2f-54ed-4d01-b143-82edecc32788\") " pod="cert-manager/cert-manager-5b446d88c5-4xcpj"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.862747 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxcmp\" (UniqueName: \"kubernetes.io/projected/d50e48e4-4e31-4a3b-bf2f-69dcbfdf8ef4-kube-api-access-lxcmp\") pod \"cert-manager-cainjector-7f985d654d-58lpp\" (UID: \"d50e48e4-4e31-4a3b-bf2f-69dcbfdf8ef4\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-58lpp"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.862792 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5wkw\" (UniqueName: \"kubernetes.io/projected/5689e64c-b30a-4009-ad81-2ace50352b94-kube-api-access-z5wkw\") pod \"cert-manager-webhook-5655c58dd6-ldckd\" (UID: \"5689e64c-b30a-4009-ad81-2ace50352b94\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-ldckd"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.883246 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxcmp\" (UniqueName: \"kubernetes.io/projected/d50e48e4-4e31-4a3b-bf2f-69dcbfdf8ef4-kube-api-access-lxcmp\") pod \"cert-manager-cainjector-7f985d654d-58lpp\" (UID: \"d50e48e4-4e31-4a3b-bf2f-69dcbfdf8ef4\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-58lpp"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.883448 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5wkw\" (UniqueName: \"kubernetes.io/projected/5689e64c-b30a-4009-ad81-2ace50352b94-kube-api-access-z5wkw\") pod \"cert-manager-webhook-5655c58dd6-ldckd\" (UID: \"5689e64c-b30a-4009-ad81-2ace50352b94\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-ldckd"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.887564 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8thxq\" (UniqueName: \"kubernetes.io/projected/f65d7f2f-54ed-4d01-b143-82edecc32788-kube-api-access-8thxq\") pod \"cert-manager-5b446d88c5-4xcpj\" (UID: \"f65d7f2f-54ed-4d01-b143-82edecc32788\") " pod="cert-manager/cert-manager-5b446d88c5-4xcpj"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.931135 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-58lpp"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.947604 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-4xcpj"
Oct 13 13:16:24 crc kubenswrapper[4684]: I1013 13:16:24.961298 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-ldckd"
Oct 13 13:16:25 crc kubenswrapper[4684]: I1013 13:16:25.179486 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-58lpp"]
Oct 13 13:16:25 crc kubenswrapper[4684]: I1013 13:16:25.196980 4684 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 13 13:16:25 crc kubenswrapper[4684]: I1013 13:16:25.248192 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-ldckd"]
Oct 13 13:16:25 crc kubenswrapper[4684]: W1013 13:16:25.253999 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5689e64c_b30a_4009_ad81_2ace50352b94.slice/crio-6e491d3f11f89571f4e207a59899d6a2924238b6305e4e08ff6462522fd0991b WatchSource:0}: Error finding container 6e491d3f11f89571f4e207a59899d6a2924238b6305e4e08ff6462522fd0991b: Status 404 returned error can't find the container with id 6e491d3f11f89571f4e207a59899d6a2924238b6305e4e08ff6462522fd0991b
Oct 13 13:16:25 crc kubenswrapper[4684]: W1013 13:16:25.470850 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf65d7f2f_54ed_4d01_b143_82edecc32788.slice/crio-622463ccaafc5cc1973d30b9d4192e1c91df7403ffd45089a9259ef46a740a0e WatchSource:0}: Error finding container 622463ccaafc5cc1973d30b9d4192e1c91df7403ffd45089a9259ef46a740a0e: Status 404 returned error can't find the container with id 622463ccaafc5cc1973d30b9d4192e1c91df7403ffd45089a9259ef46a740a0e
Oct 13 13:16:25 crc kubenswrapper[4684]: I1013 13:16:25.471743 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-4xcpj"]
Oct 13 13:16:25 crc kubenswrapper[4684]: I1013 13:16:25.710048 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-58lpp" event={"ID":"d50e48e4-4e31-4a3b-bf2f-69dcbfdf8ef4","Type":"ContainerStarted","Data":"9a59d4ea26382ea2372b55e0ed73c4da9430e5685d2ef81e0498df5dab99dd47"}
Oct 13 13:16:25 crc kubenswrapper[4684]: I1013 13:16:25.712251 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-ldckd" event={"ID":"5689e64c-b30a-4009-ad81-2ace50352b94","Type":"ContainerStarted","Data":"6e491d3f11f89571f4e207a59899d6a2924238b6305e4e08ff6462522fd0991b"}
Oct 13 13:16:25 crc kubenswrapper[4684]: I1013 13:16:25.713399 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-4xcpj" event={"ID":"f65d7f2f-54ed-4d01-b143-82edecc32788","Type":"ContainerStarted","Data":"622463ccaafc5cc1973d30b9d4192e1c91df7403ffd45089a9259ef46a740a0e"}
Oct 13 13:16:27 crc kubenswrapper[4684]: I1013 13:16:27.730653 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-ldckd" event={"ID":"5689e64c-b30a-4009-ad81-2ace50352b94","Type":"ContainerStarted","Data":"3ab1fe79d6a549932433dd9d6d11d07165d8353e06db7af34382ca427bf3dc84"}
Oct 13 13:16:27 crc kubenswrapper[4684]: I1013 13:16:27.731260 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-ldckd"
Oct 13 13:16:27 crc kubenswrapper[4684]: I1013 13:16:27.746327 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-ldckd" podStartSLOduration=1.51120692 podStartE2EDuration="3.746309743s" podCreationTimestamp="2025-10-13 13:16:24 +0000 UTC" firstStartedPulling="2025-10-13 13:16:25.256137998 +0000 UTC m=+539.823522068" lastFinishedPulling="2025-10-13 13:16:27.491240811 +0000 UTC m=+542.058624891" observedRunningTime="2025-10-13 13:16:27.744209275 +0000 UTC m=+542.311593365" watchObservedRunningTime="2025-10-13 13:16:27.746309743 +0000 UTC m=+542.313693813"
Oct 13 13:16:28 crc kubenswrapper[4684]: I1013 13:16:28.757401 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-4xcpj" podStartSLOduration=1.6182698979999999 podStartE2EDuration="4.757375254s" podCreationTimestamp="2025-10-13 13:16:24 +0000 UTC" firstStartedPulling="2025-10-13 13:16:25.473022314 +0000 UTC m=+540.040406384" lastFinishedPulling="2025-10-13 13:16:28.61212767 +0000 UTC m=+543.179511740" observedRunningTime="2025-10-13 13:16:28.75414224 +0000 UTC m=+543.321526320" watchObservedRunningTime="2025-10-13 13:16:28.757375254 +0000 UTC m=+543.324759344"
Oct 13 13:16:29 crc kubenswrapper[4684]: I1013 13:16:29.748749 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-4xcpj" event={"ID":"f65d7f2f-54ed-4d01-b143-82edecc32788","Type":"ContainerStarted","Data":"d646b7c37e462a97c394d0dd061ef9258e7718f12292ff2a87db9bef794941b2"}
Oct 13 13:16:29 crc kubenswrapper[4684]: I1013 13:16:29.751049 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-58lpp" event={"ID":"d50e48e4-4e31-4a3b-bf2f-69dcbfdf8ef4","Type":"ContainerStarted","Data":"fe16a2076be7f56f5f05b00ea3c58c7e460fa2d640e9489f8bea4ae09e1aca2a"}
Oct 13 13:16:29 crc kubenswrapper[4684]: I1013 13:16:29.776462 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-58lpp" podStartSLOduration=2.4092320210000002 podStartE2EDuration="5.776437173s" podCreationTimestamp="2025-10-13 13:16:24 +0000 UTC" firstStartedPulling="2025-10-13 13:16:25.196449391 +0000 UTC m=+539.763833461" lastFinishedPulling="2025-10-13 13:16:28.563654543 +0000 UTC m=+543.131038613" observedRunningTime="2025-10-13 13:16:29.775672348 +0000 UTC m=+544.343056428" watchObservedRunningTime="2025-10-13 13:16:29.776437173 +0000 UTC m=+544.343821253"
Oct 13 13:16:30 crc kubenswrapper[4684]: I1013 13:16:30.560539 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 13:16:30 crc kubenswrapper[4684]: I1013 13:16:30.560610 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 13:16:34 crc kubenswrapper[4684]: I1013 13:16:34.964647 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-ldckd"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.165764 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-9sq8c"]
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.166219 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovn-controller" containerID="cri-o://6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52" gracePeriod=30
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.166286 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="nbdb" containerID="cri-o://36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38" gracePeriod=30
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.166379 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovn-acl-logging" containerID="cri-o://cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96" gracePeriod=30
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.166419 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="kube-rbac-proxy-node" containerID="cri-o://a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66" gracePeriod=30
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.166451 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="sbdb" containerID="cri-o://531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7" gracePeriod=30
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.166636 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5" gracePeriod=30
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.166649 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="northd" containerID="cri-o://103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e" gracePeriod=30
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.212046 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovnkube-controller" containerID="cri-o://d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36" gracePeriod=30
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.463221 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovnkube-controller/3.log"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.465190 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovn-acl-logging/0.log"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.465549 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovn-controller/0.log"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.465838 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.551761 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-plsgq"]
Oct 13 13:16:35 crc kubenswrapper[4684]: E1013 13:16:35.551977 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="kube-rbac-proxy-node"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.551990 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="kube-rbac-proxy-node"
Oct 13 13:16:35 crc kubenswrapper[4684]: E1013 13:16:35.551997 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovn-acl-logging"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552003 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovn-acl-logging"
Oct 13 13:16:35 crc kubenswrapper[4684]: E1013 13:16:35.552013 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovnkube-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552018 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovnkube-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: E1013 13:16:35.552026 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="kubecfg-setup"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552033 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="kubecfg-setup"
Oct 13 13:16:35 crc kubenswrapper[4684]: E1013 13:16:35.552043 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovnkube-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552048 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovnkube-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: E1013 13:16:35.552057 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovnkube-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552062 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovnkube-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: E1013 13:16:35.552068 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovn-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552074 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovn-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: E1013 13:16:35.552084 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="kube-rbac-proxy-ovn-metrics"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552090 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="kube-rbac-proxy-ovn-metrics"
Oct 13 13:16:35 crc kubenswrapper[4684]: E1013 13:16:35.552102 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="sbdb"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552108 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="sbdb"
Oct 13 13:16:35 crc kubenswrapper[4684]: E1013 13:16:35.552116 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="nbdb"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552122 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="nbdb"
Oct 13 13:16:35 crc kubenswrapper[4684]: E1013 13:16:35.552132 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="northd"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552137 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="northd"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552214 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="kube-rbac-proxy-ovn-metrics"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552224 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovnkube-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552232 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="sbdb"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552239 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovnkube-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552245 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="kube-rbac-proxy-node"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552254 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovn-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552260 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovnkube-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552269 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovn-acl-logging"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552276 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="nbdb"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552283 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="northd"
Oct 13 13:16:35 crc kubenswrapper[4684]: E1013 13:16:35.552357 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovnkube-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552365 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovnkube-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552450 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovnkube-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552459 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovnkube-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: E1013 13:16:35.552540 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovnkube-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.552547 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerName="ovnkube-controller"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.553838 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq"
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.611971 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-var-lib-cni-networks-ovn-kubernetes\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612025 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-systemd-units\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612052 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-run-ovn-kubernetes\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612093 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-slash\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612128 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-log-socket\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612152 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-systemd\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612193 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612195 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612211 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovn-node-metrics-cert\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612278 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-slash" (OuterVolumeSpecName: "host-slash") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612285 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-etc-openvswitch\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612309 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612336 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612362 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-log-socket" (OuterVolumeSpecName: "log-socket") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612367 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovnkube-script-lib\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612400 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-cni-netd\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612456 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-openvswitch\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612497 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovnkube-config\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612573 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-ovn\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612619 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnvbx\" (UniqueName: \"kubernetes.io/projected/9b180ad7-c68c-4234-9b7b-aa938e5ad590-kube-api-access-mnvbx\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612664 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-run-netns\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612702 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-cni-bin\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612740 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-node-log\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") "
Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612779 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-env-overrides\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID:
\"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612815 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-kubelet\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.612846 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-var-lib-openvswitch\") pod \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\" (UID: \"9b180ad7-c68c-4234-9b7b-aa938e5ad590\") " Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613139 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613223 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613249 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-node-log" (OuterVolumeSpecName: "node-log") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613270 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613292 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613328 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613374 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613438 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613453 4684 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613516 4684 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613548 4684 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613575 4684 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613598 4684 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-node-log\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613600 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613624 4684 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613634 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613649 4684 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613678 4684 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613705 4684 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613729 4684 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-slash\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613752 4684 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-log-socket\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613779 4684 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613803 4684 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.613993 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.617889 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b180ad7-c68c-4234-9b7b-aa938e5ad590-kube-api-access-mnvbx" (OuterVolumeSpecName: "kube-api-access-mnvbx") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "kube-api-access-mnvbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.619484 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.629766 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "9b180ad7-c68c-4234-9b7b-aa938e5ad590" (UID: "9b180ad7-c68c-4234-9b7b-aa938e5ad590"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714328 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-cni-bin\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714383 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/229bdc70-cc5f-459e-b375-59e83a2c7765-ovnkube-config\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714401 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-run-openvswitch\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714423 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-log-socket\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714492 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lq2cn\" (UniqueName: \"kubernetes.io/projected/229bdc70-cc5f-459e-b375-59e83a2c7765-kube-api-access-lq2cn\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714596 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-var-lib-openvswitch\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714639 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-cni-netd\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714666 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-run-ovn\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714690 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-slash\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714714 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/229bdc70-cc5f-459e-b375-59e83a2c7765-ovn-node-metrics-cert\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714739 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/229bdc70-cc5f-459e-b375-59e83a2c7765-env-overrides\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714761 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-node-log\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714783 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-kubelet\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714813 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714837 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-run-ovn-kubernetes\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714856 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/229bdc70-cc5f-459e-b375-59e83a2c7765-ovnkube-script-lib\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714881 
4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-run-systemd\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714964 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-systemd-units\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.714986 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-run-netns\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.715014 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-etc-openvswitch\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.715059 4684 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.715074 4684 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.715086 4684 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9b180ad7-c68c-4234-9b7b-aa938e5ad590-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.715098 4684 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.715110 4684 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.715124 4684 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9b180ad7-c68c-4234-9b7b-aa938e5ad590-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.715135 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnvbx\" (UniqueName: \"kubernetes.io/projected/9b180ad7-c68c-4234-9b7b-aa938e5ad590-kube-api-access-mnvbx\") on node \"crc\" DevicePath \"\"" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.803938 4684 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovnkube-controller/3.log" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.807657 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovn-acl-logging/0.log" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.808618 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-9sq8c_9b180ad7-c68c-4234-9b7b-aa938e5ad590/ovn-controller/0.log" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809389 4684 generic.go:334] "Generic (PLEG): container finished" podID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerID="d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36" exitCode=0 Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809451 4684 generic.go:334] "Generic (PLEG): container finished" podID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerID="531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7" exitCode=0 Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809476 4684 generic.go:334] "Generic (PLEG): container finished" podID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerID="36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38" exitCode=0 Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809492 4684 generic.go:334] "Generic (PLEG): container finished" podID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerID="103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e" exitCode=0 Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809506 4684 generic.go:334] "Generic (PLEG): container finished" podID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerID="97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5" exitCode=0 Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809524 4684 generic.go:334] "Generic (PLEG): container finished" podID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerID="a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66" exitCode=0 Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809539 4684 generic.go:334] "Generic (PLEG): container finished" podID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerID="cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96" exitCode=143 Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809557 4684 generic.go:334] "Generic (PLEG): container finished" podID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" containerID="6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52" exitCode=143 Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809593 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809641 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerDied","Data":"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809687 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerDied","Data":"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809711 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerDied","Data":"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809731 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerDied","Data":"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809750 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerDied","Data":"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809771 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerDied","Data":"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809791 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809808 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809821 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809833 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809844 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809855 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809866 4684 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809883 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809898 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809959 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerDied","Data":"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.809984 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810000 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810013 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810025 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810037 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810050 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810062 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810074 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810087 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810102 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810123 4684 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerDied","Data":"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810147 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810165 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810180 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810195 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810211 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810226 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810242 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810247 4684 scope.go:117] "RemoveContainer" containerID="d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810258 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810273 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810288 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810311 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9sq8c" event={"ID":"9b180ad7-c68c-4234-9b7b-aa938e5ad590","Type":"ContainerDied","Data":"2d185939a58a3231b24507b5d66c1e44a27830c1e6ff09fa64e1cd3477b40a44"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810335 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810353 4684 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810369 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810386 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810401 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810418 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810434 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810448 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810464 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.810480 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.814792 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r7wd2_eb2c3381-fecf-46e7-a034-d3c560dff35e/kube-multus/2.log" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816047 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r7wd2_eb2c3381-fecf-46e7-a034-d3c560dff35e/kube-multus/1.log" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816092 4684 generic.go:334] "Generic (PLEG): container finished" podID="eb2c3381-fecf-46e7-a034-d3c560dff35e" containerID="623bea441e3569a23040732a3943af02bd10ca1a80181e4ccb673180bd26774c" exitCode=2 Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816153 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-r7wd2" event={"ID":"eb2c3381-fecf-46e7-a034-d3c560dff35e","Type":"ContainerDied","Data":"623bea441e3569a23040732a3943af02bd10ca1a80181e4ccb673180bd26774c"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816242 4684 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d6b6e03a8d9eda8c526bd9b0c6d01d314d3dbaa9f9a9d5238dce1eac3f5c167c"} Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816287 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"log-socket\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-log-socket\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816366 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lq2cn\" (UniqueName: \"kubernetes.io/projected/229bdc70-cc5f-459e-b375-59e83a2c7765-kube-api-access-lq2cn\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816441 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-var-lib-openvswitch\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816454 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-log-socket\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816505 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-cni-netd\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816563 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-run-ovn\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816626 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-cni-netd\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816575 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-var-lib-openvswitch\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816640 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-slash\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816702 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-slash\") pod \"ovnkube-node-plsgq\" (UID: 
\"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816723 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-run-ovn\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816738 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/229bdc70-cc5f-459e-b375-59e83a2c7765-ovn-node-metrics-cert\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.816892 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/229bdc70-cc5f-459e-b375-59e83a2c7765-env-overrides\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.817012 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-node-log\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.817064 4684 scope.go:117] "RemoveContainer" containerID="623bea441e3569a23040732a3943af02bd10ca1a80181e4ccb673180bd26774c" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.817099 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-kubelet\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.817171 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-run-ovn-kubernetes\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.817230 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-kubelet\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.817234 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.817294 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" 
(UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.817185 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-node-log\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.817311 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-run-ovn-kubernetes\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.817333 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/229bdc70-cc5f-459e-b375-59e83a2c7765-ovnkube-script-lib\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: E1013 13:16:35.817549 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-r7wd2_openshift-multus(eb2c3381-fecf-46e7-a034-d3c560dff35e)\"" pod="openshift-multus/multus-r7wd2" podUID="eb2c3381-fecf-46e7-a034-d3c560dff35e" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.817748 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-run-systemd\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.817838 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-systemd-units\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.817836 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-run-systemd\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.817928 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-run-netns\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.817945 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-systemd-units\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.818000 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-etc-openvswitch\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.818035 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-run-netns\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.818062 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/229bdc70-cc5f-459e-b375-59e83a2c7765-env-overrides\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.818109 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-etc-openvswitch\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.818084 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-cni-bin\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.818143 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-host-cni-bin\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.818191 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/229bdc70-cc5f-459e-b375-59e83a2c7765-ovnkube-config\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.818251 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-run-openvswitch\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.818388 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/229bdc70-cc5f-459e-b375-59e83a2c7765-run-openvswitch\") pod \"ovnkube-node-plsgq\" (UID: 
\"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.819233 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/229bdc70-cc5f-459e-b375-59e83a2c7765-ovnkube-script-lib\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.819501 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/229bdc70-cc5f-459e-b375-59e83a2c7765-ovnkube-config\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.825002 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/229bdc70-cc5f-459e-b375-59e83a2c7765-ovn-node-metrics-cert\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.844766 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lq2cn\" (UniqueName: \"kubernetes.io/projected/229bdc70-cc5f-459e-b375-59e83a2c7765-kube-api-access-lq2cn\") pod \"ovnkube-node-plsgq\" (UID: \"229bdc70-cc5f-459e-b375-59e83a2c7765\") " pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.849850 4684 scope.go:117] "RemoveContainer" containerID="906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.868141 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.874339 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-9sq8c"] Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.881412 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-9sq8c"] Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.899305 4684 scope.go:117] "RemoveContainer" containerID="531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.911874 4684 scope.go:117] "RemoveContainer" containerID="36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.926993 4684 scope.go:117] "RemoveContainer" containerID="103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.940208 4684 scope.go:117] "RemoveContainer" containerID="97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.964602 4684 scope.go:117] "RemoveContainer" containerID="a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.981014 4684 scope.go:117] "RemoveContainer" containerID="cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96" Oct 13 13:16:35 crc kubenswrapper[4684]: I1013 13:16:35.995076 4684 scope.go:117] "RemoveContainer" containerID="6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.010915 4684 scope.go:117] "RemoveContainer" containerID="5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.030814 4684 scope.go:117] "RemoveContainer" containerID="d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36" Oct 13 13:16:36 crc kubenswrapper[4684]: E1013 13:16:36.031138 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36\": container with ID starting with d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36 not found: ID does not exist" containerID="d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.031172 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36"} err="failed to get container status \"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36\": rpc error: code = NotFound desc = could not find container \"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36\": container with ID starting with d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.031193 4684 scope.go:117] "RemoveContainer" containerID="906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32" Oct 13 13:16:36 crc kubenswrapper[4684]: E1013 13:16:36.031838 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32\": container with ID starting with 
906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32 not found: ID does not exist" containerID="906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.031859 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32"} err="failed to get container status \"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32\": rpc error: code = NotFound desc = could not find container \"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32\": container with ID starting with 906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.031873 4684 scope.go:117] "RemoveContainer" containerID="531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7" Oct 13 13:16:36 crc kubenswrapper[4684]: E1013 13:16:36.032148 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\": container with ID starting with 531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7 not found: ID does not exist" containerID="531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.032176 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7"} err="failed to get container status \"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\": rpc error: code = NotFound desc = could not find container \"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\": container with ID starting with 531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.032191 4684 scope.go:117] "RemoveContainer" containerID="36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38" Oct 13 13:16:36 crc kubenswrapper[4684]: E1013 13:16:36.032448 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\": container with ID starting with 36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38 not found: ID does not exist" containerID="36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.032468 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38"} err="failed to get container status \"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\": rpc error: code = NotFound desc = could not find container \"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\": container with ID starting with 36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.032482 4684 scope.go:117] "RemoveContainer" containerID="103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e" Oct 13 13:16:36 crc kubenswrapper[4684]: E1013 13:16:36.032726 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\": container with ID starting with 103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e not found: ID does not exist" containerID="103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.032749 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e"} err="failed to get container status \"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\": rpc error: code = NotFound desc = could not find container \"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\": container with ID starting with 103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.032762 4684 scope.go:117] "RemoveContainer" containerID="97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5" Oct 13 13:16:36 crc kubenswrapper[4684]: E1013 13:16:36.033021 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\": container with ID starting with 97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5 not found: ID does not exist" containerID="97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.033043 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5"} err="failed to get container status \"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\": rpc error: code = NotFound desc = could not find container \"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\": container with ID starting with 97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.033057 4684 scope.go:117] "RemoveContainer" containerID="a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66" Oct 13 13:16:36 crc kubenswrapper[4684]: E1013 13:16:36.033568 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\": container with ID starting with a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66 not found: ID does not exist" containerID="a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.033587 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66"} err="failed to get container status \"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\": rpc error: code = NotFound desc = could not find container \"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\": container with ID starting with a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.033600 4684 scope.go:117] "RemoveContainer" 
containerID="cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96" Oct 13 13:16:36 crc kubenswrapper[4684]: E1013 13:16:36.034127 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\": container with ID starting with cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96 not found: ID does not exist" containerID="cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.034147 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96"} err="failed to get container status \"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\": rpc error: code = NotFound desc = could not find container \"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\": container with ID starting with cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.034162 4684 scope.go:117] "RemoveContainer" containerID="6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52" Oct 13 13:16:36 crc kubenswrapper[4684]: E1013 13:16:36.034508 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\": container with ID starting with 6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52 not found: ID does not exist" containerID="6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.034593 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52"} err="failed to get container status \"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\": rpc error: code = NotFound desc = could not find container \"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\": container with ID starting with 6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.034646 4684 scope.go:117] "RemoveContainer" containerID="5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7" Oct 13 13:16:36 crc kubenswrapper[4684]: E1013 13:16:36.035533 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\": container with ID starting with 5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7 not found: ID does not exist" containerID="5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.035555 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7"} err="failed to get container status \"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\": rpc error: code = NotFound desc = could not find container \"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\": container with ID starting with 
5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.035568 4684 scope.go:117] "RemoveContainer" containerID="d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.036727 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36"} err="failed to get container status \"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36\": rpc error: code = NotFound desc = could not find container \"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36\": container with ID starting with d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.036753 4684 scope.go:117] "RemoveContainer" containerID="906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.037061 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32"} err="failed to get container status \"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32\": rpc error: code = NotFound desc = could not find container \"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32\": container with ID starting with 906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.037082 4684 scope.go:117] "RemoveContainer" containerID="531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.037352 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7"} err="failed to get container status \"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\": rpc error: code = NotFound desc = could not find container \"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\": container with ID starting with 531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.037372 4684 scope.go:117] "RemoveContainer" containerID="36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.037580 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38"} err="failed to get container status \"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\": rpc error: code = NotFound desc = could not find container \"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\": container with ID starting with 36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.037601 4684 scope.go:117] "RemoveContainer" containerID="103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.037921 4684 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e"} err="failed to get container status \"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\": rpc error: code = NotFound desc = could not find container \"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\": container with ID starting with 103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.037948 4684 scope.go:117] "RemoveContainer" containerID="97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.038145 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5"} err="failed to get container status \"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\": rpc error: code = NotFound desc = could not find container \"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\": container with ID starting with 97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.038164 4684 scope.go:117] "RemoveContainer" containerID="a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.038438 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66"} err="failed to get container status \"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\": rpc error: code = NotFound desc = could not find container \"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\": container with ID starting with a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.038455 4684 scope.go:117] "RemoveContainer" containerID="cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.038618 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96"} err="failed to get container status \"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\": rpc error: code = NotFound desc = could not find container \"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\": container with ID starting with cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.038637 4684 scope.go:117] "RemoveContainer" containerID="6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.038921 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52"} err="failed to get container status \"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\": rpc error: code = NotFound desc = could not find container \"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\": container with ID starting with 6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52 not found: ID does not exist" Oct 
13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.038949 4684 scope.go:117] "RemoveContainer" containerID="5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.039188 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7"} err="failed to get container status \"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\": rpc error: code = NotFound desc = could not find container \"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\": container with ID starting with 5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.039215 4684 scope.go:117] "RemoveContainer" containerID="d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.039519 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36"} err="failed to get container status \"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36\": rpc error: code = NotFound desc = could not find container \"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36\": container with ID starting with d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.039536 4684 scope.go:117] "RemoveContainer" containerID="906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.039807 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32"} err="failed to get container status \"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32\": rpc error: code = NotFound desc = could not find container \"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32\": container with ID starting with 906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.039831 4684 scope.go:117] "RemoveContainer" containerID="531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.040043 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7"} err="failed to get container status \"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\": rpc error: code = NotFound desc = could not find container \"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\": container with ID starting with 531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.040070 4684 scope.go:117] "RemoveContainer" containerID="36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.040303 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38"} err="failed to get container status 
\"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\": rpc error: code = NotFound desc = could not find container \"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\": container with ID starting with 36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.040323 4684 scope.go:117] "RemoveContainer" containerID="103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.040515 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e"} err="failed to get container status \"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\": rpc error: code = NotFound desc = could not find container \"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\": container with ID starting with 103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.040543 4684 scope.go:117] "RemoveContainer" containerID="97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.041022 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5"} err="failed to get container status \"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\": rpc error: code = NotFound desc = could not find container \"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\": container with ID starting with 97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.041041 4684 scope.go:117] "RemoveContainer" containerID="a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.041337 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66"} err="failed to get container status \"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\": rpc error: code = NotFound desc = could not find container \"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\": container with ID starting with a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.041354 4684 scope.go:117] "RemoveContainer" containerID="cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.041558 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96"} err="failed to get container status \"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\": rpc error: code = NotFound desc = could not find container \"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\": container with ID starting with cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.041583 4684 scope.go:117] "RemoveContainer" 
containerID="6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.041792 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52"} err="failed to get container status \"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\": rpc error: code = NotFound desc = could not find container \"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\": container with ID starting with 6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.041809 4684 scope.go:117] "RemoveContainer" containerID="5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.042155 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7"} err="failed to get container status \"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\": rpc error: code = NotFound desc = could not find container \"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\": container with ID starting with 5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.042184 4684 scope.go:117] "RemoveContainer" containerID="d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.042411 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36"} err="failed to get container status \"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36\": rpc error: code = NotFound desc = could not find container \"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36\": container with ID starting with d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.042429 4684 scope.go:117] "RemoveContainer" containerID="906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.042737 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32"} err="failed to get container status \"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32\": rpc error: code = NotFound desc = could not find container \"906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32\": container with ID starting with 906974d1f8202ba12d8d0b6c20a598eafcee40fbe6af75c32a2e519ae1995b32 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.042756 4684 scope.go:117] "RemoveContainer" containerID="531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.043059 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7"} err="failed to get container status \"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\": rpc error: code = NotFound desc = could not find 
container \"531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7\": container with ID starting with 531ad0b2f9118be06330f8d63cb9779a62ffb163338f9fe6b114d3948327cde7 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.043086 4684 scope.go:117] "RemoveContainer" containerID="36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.043378 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38"} err="failed to get container status \"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\": rpc error: code = NotFound desc = could not find container \"36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38\": container with ID starting with 36294f6fc6d3475a57a45fa73762ae69ad05d040565f4985be7db99cbec07a38 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.043398 4684 scope.go:117] "RemoveContainer" containerID="103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.043596 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e"} err="failed to get container status \"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\": rpc error: code = NotFound desc = could not find container \"103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e\": container with ID starting with 103f6385a6cdd193eb6c07df53113e62f13e67e2e87d87aa193febaa7958080e not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.043608 4684 scope.go:117] "RemoveContainer" containerID="97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.043888 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5"} err="failed to get container status \"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\": rpc error: code = NotFound desc = could not find container \"97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5\": container with ID starting with 97027e7bed481227131234b53285e5f31136aaebb580edbf9760ee5c23974cd5 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.043925 4684 scope.go:117] "RemoveContainer" containerID="a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.044103 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66"} err="failed to get container status \"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\": rpc error: code = NotFound desc = could not find container \"a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66\": container with ID starting with a20210e747ebc5e05192e37e6f6c620a2bc688c63f81a0bf0d544e96ad095f66 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.044120 4684 scope.go:117] "RemoveContainer" containerID="cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.044286 4684 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96"} err="failed to get container status \"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\": rpc error: code = NotFound desc = could not find container \"cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96\": container with ID starting with cd381dd91bd8d593d511b5ed18aa65cf906a7f958b9273d4561c16ddbca48b96 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.044302 4684 scope.go:117] "RemoveContainer" containerID="6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.044456 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52"} err="failed to get container status \"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\": rpc error: code = NotFound desc = could not find container \"6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52\": container with ID starting with 6b4d64efc0aa7bc3947956b6b918619e9ada2c6dcb17616d9cf026149476bf52 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.044482 4684 scope.go:117] "RemoveContainer" containerID="5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.044683 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7"} err="failed to get container status \"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\": rpc error: code = NotFound desc = could not find container \"5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7\": container with ID starting with 5fcc76fbf58fdced58673fcced6856c67d02090b9b0d3211527077615557a0d7 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.044701 4684 scope.go:117] "RemoveContainer" containerID="d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.044853 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36"} err="failed to get container status \"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36\": rpc error: code = NotFound desc = could not find container \"d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36\": container with ID starting with d7b4251a974589c8a4f217af2655a0045a79975479a7aff1dcc51866a87c4b36 not found: ID does not exist" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.367982 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b180ad7-c68c-4234-9b7b-aa938e5ad590" path="/var/lib/kubelet/pods/9b180ad7-c68c-4234-9b7b-aa938e5ad590/volumes" Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.822229 4684 generic.go:334] "Generic (PLEG): container finished" podID="229bdc70-cc5f-459e-b375-59e83a2c7765" containerID="3b2ae046ff7562dd9612014a4f4fe21f0f0f14d0538d07ca63e0c49cc4986966" exitCode=0 Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.822303 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" 
event={"ID":"229bdc70-cc5f-459e-b375-59e83a2c7765","Type":"ContainerDied","Data":"3b2ae046ff7562dd9612014a4f4fe21f0f0f14d0538d07ca63e0c49cc4986966"} Oct 13 13:16:36 crc kubenswrapper[4684]: I1013 13:16:36.822342 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" event={"ID":"229bdc70-cc5f-459e-b375-59e83a2c7765","Type":"ContainerStarted","Data":"e94a3e243f48a92318783746aad9ccc55ba05b810b7db359832c841352a3819f"} Oct 13 13:16:37 crc kubenswrapper[4684]: I1013 13:16:37.831136 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" event={"ID":"229bdc70-cc5f-459e-b375-59e83a2c7765","Type":"ContainerStarted","Data":"a27b7e628fde6c952383d4f2992f6a10c7b64d89310956bbf92145dd4831181c"} Oct 13 13:16:37 crc kubenswrapper[4684]: I1013 13:16:37.831415 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" event={"ID":"229bdc70-cc5f-459e-b375-59e83a2c7765","Type":"ContainerStarted","Data":"ea378194ce70df2773c5af8805fc6e9609ecb1a2f3bb649a743aa0e61d62508a"} Oct 13 13:16:37 crc kubenswrapper[4684]: I1013 13:16:37.831426 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" event={"ID":"229bdc70-cc5f-459e-b375-59e83a2c7765","Type":"ContainerStarted","Data":"57ca011bb916bb5fada6aaa22b5066f56b9648d24746053963ae72564e7bf922"} Oct 13 13:16:37 crc kubenswrapper[4684]: I1013 13:16:37.831437 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" event={"ID":"229bdc70-cc5f-459e-b375-59e83a2c7765","Type":"ContainerStarted","Data":"ef8a742163253d833caab92b514b864a77163c9f35f3bd88a4c5a5cc9bb2bc82"} Oct 13 13:16:37 crc kubenswrapper[4684]: I1013 13:16:37.831446 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" event={"ID":"229bdc70-cc5f-459e-b375-59e83a2c7765","Type":"ContainerStarted","Data":"d7c3c961d4908996f4ac9192b520631915dfbd011ad54c91123698f150de06b4"} Oct 13 13:16:38 crc kubenswrapper[4684]: I1013 13:16:38.842892 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" event={"ID":"229bdc70-cc5f-459e-b375-59e83a2c7765","Type":"ContainerStarted","Data":"9745cdd8d69a84f4c792854d2297b3a50dd24617786719246f97e4cd2580cbdc"} Oct 13 13:16:40 crc kubenswrapper[4684]: I1013 13:16:40.861524 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" event={"ID":"229bdc70-cc5f-459e-b375-59e83a2c7765","Type":"ContainerStarted","Data":"9e058858434312eebde3a22df38fa94802eaf2137d9c675601c5d0d34989347f"} Oct 13 13:16:42 crc kubenswrapper[4684]: I1013 13:16:42.878571 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" event={"ID":"229bdc70-cc5f-459e-b375-59e83a2c7765","Type":"ContainerStarted","Data":"265114c2b14f9ee0c02ec2aa993166dfc43aae8bcad9bf0e2bab032e5c73a5ae"} Oct 13 13:16:42 crc kubenswrapper[4684]: I1013 13:16:42.880697 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:42 crc kubenswrapper[4684]: I1013 13:16:42.880761 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:42 crc kubenswrapper[4684]: I1013 13:16:42.932955 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:42 crc kubenswrapper[4684]: I1013 13:16:42.935307 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" podStartSLOduration=7.935283194 podStartE2EDuration="7.935283194s" podCreationTimestamp="2025-10-13 13:16:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:16:42.933110315 +0000 UTC m=+557.500494425" watchObservedRunningTime="2025-10-13 13:16:42.935283194 +0000 UTC m=+557.502667304" Oct 13 13:16:43 crc kubenswrapper[4684]: I1013 13:16:43.886050 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:43 crc kubenswrapper[4684]: I1013 13:16:43.916544 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:16:48 crc kubenswrapper[4684]: I1013 13:16:48.350681 4684 scope.go:117] "RemoveContainer" containerID="623bea441e3569a23040732a3943af02bd10ca1a80181e4ccb673180bd26774c" Oct 13 13:16:48 crc kubenswrapper[4684]: E1013 13:16:48.351198 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-r7wd2_openshift-multus(eb2c3381-fecf-46e7-a034-d3c560dff35e)\"" pod="openshift-multus/multus-r7wd2" podUID="eb2c3381-fecf-46e7-a034-d3c560dff35e" Oct 13 13:16:59 crc kubenswrapper[4684]: I1013 13:16:59.352056 4684 scope.go:117] "RemoveContainer" containerID="623bea441e3569a23040732a3943af02bd10ca1a80181e4ccb673180bd26774c" Oct 13 13:16:59 crc kubenswrapper[4684]: I1013 13:16:59.990572 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r7wd2_eb2c3381-fecf-46e7-a034-d3c560dff35e/kube-multus/2.log" Oct 13 13:16:59 crc kubenswrapper[4684]: I1013 13:16:59.992165 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r7wd2_eb2c3381-fecf-46e7-a034-d3c560dff35e/kube-multus/1.log" Oct 13 13:16:59 crc kubenswrapper[4684]: I1013 13:16:59.992267 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-r7wd2" event={"ID":"eb2c3381-fecf-46e7-a034-d3c560dff35e","Type":"ContainerStarted","Data":"78552ce77df51e5203b225321006ec88a8b46adabe34b02f81aadb15192b154f"} Oct 13 13:17:00 crc kubenswrapper[4684]: I1013 13:17:00.559540 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:17:00 crc kubenswrapper[4684]: I1013 13:17:00.559598 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:17:00 crc kubenswrapper[4684]: I1013 13:17:00.559640 4684 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:17:00 crc kubenswrapper[4684]: I1013 13:17:00.560222 4684 kuberuntime_manager.go:1027] "Message for 
Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5d7bac5989bd561f1d776c849f654aeb770962f36d566b0607016c06463a1f0b"} pod="openshift-machine-config-operator/machine-config-daemon-wns5s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 13:17:00 crc kubenswrapper[4684]: I1013 13:17:00.560280 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" containerID="cri-o://5d7bac5989bd561f1d776c849f654aeb770962f36d566b0607016c06463a1f0b" gracePeriod=600 Oct 13 13:17:01 crc kubenswrapper[4684]: I1013 13:17:01.002244 4684 generic.go:334] "Generic (PLEG): container finished" podID="e54ad64a-6df7-4082-afde-d56463121b3f" containerID="5d7bac5989bd561f1d776c849f654aeb770962f36d566b0607016c06463a1f0b" exitCode=0 Oct 13 13:17:01 crc kubenswrapper[4684]: I1013 13:17:01.002346 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerDied","Data":"5d7bac5989bd561f1d776c849f654aeb770962f36d566b0607016c06463a1f0b"} Oct 13 13:17:01 crc kubenswrapper[4684]: I1013 13:17:01.002528 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"5c52fe2f8e685623bf18216a39b095d01b746cd93787ab68efae18b207ec65af"} Oct 13 13:17:01 crc kubenswrapper[4684]: I1013 13:17:01.002558 4684 scope.go:117] "RemoveContainer" containerID="b6dd768883ba78cafb92197d19e9353d53ffabce7a93163ce51b64d353a36851" Oct 13 13:17:05 crc kubenswrapper[4684]: I1013 13:17:05.893326 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-plsgq" Oct 13 13:17:11 crc kubenswrapper[4684]: I1013 13:17:11.117675 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt"] Oct 13 13:17:11 crc kubenswrapper[4684]: I1013 13:17:11.119758 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt"
Oct 13 13:17:11 crc kubenswrapper[4684]: I1013 13:17:11.122645 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Oct 13 13:17:11 crc kubenswrapper[4684]: I1013 13:17:11.140319 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt"]
Oct 13 13:17:11 crc kubenswrapper[4684]: I1013 13:17:11.287222 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/03260d40-6a40-406f-b8a9-7898ae8a3b16-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt\" (UID: \"03260d40-6a40-406f-b8a9-7898ae8a3b16\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt"
Oct 13 13:17:11 crc kubenswrapper[4684]: I1013 13:17:11.287393 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/03260d40-6a40-406f-b8a9-7898ae8a3b16-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt\" (UID: \"03260d40-6a40-406f-b8a9-7898ae8a3b16\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt"
Oct 13 13:17:11 crc kubenswrapper[4684]: I1013 13:17:11.287491 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trkbs\" (UniqueName: \"kubernetes.io/projected/03260d40-6a40-406f-b8a9-7898ae8a3b16-kube-api-access-trkbs\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt\" (UID: \"03260d40-6a40-406f-b8a9-7898ae8a3b16\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt"
Oct 13 13:17:11 crc kubenswrapper[4684]: I1013 13:17:11.388402 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trkbs\" (UniqueName: \"kubernetes.io/projected/03260d40-6a40-406f-b8a9-7898ae8a3b16-kube-api-access-trkbs\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt\" (UID: \"03260d40-6a40-406f-b8a9-7898ae8a3b16\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt"
Oct 13 13:17:11 crc kubenswrapper[4684]: I1013 13:17:11.388468 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/03260d40-6a40-406f-b8a9-7898ae8a3b16-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt\" (UID: \"03260d40-6a40-406f-b8a9-7898ae8a3b16\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt"
Oct 13 13:17:11 crc kubenswrapper[4684]: I1013 13:17:11.388515 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/03260d40-6a40-406f-b8a9-7898ae8a3b16-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt\" (UID: \"03260d40-6a40-406f-b8a9-7898ae8a3b16\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt"
Oct 13 13:17:11 crc kubenswrapper[4684]: I1013 13:17:11.388936 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/03260d40-6a40-406f-b8a9-7898ae8a3b16-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt\" (UID: \"03260d40-6a40-406f-b8a9-7898ae8a3b16\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt"
Oct 13 13:17:11 crc kubenswrapper[4684]: I1013 13:17:11.389115 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/03260d40-6a40-406f-b8a9-7898ae8a3b16-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt\" (UID: \"03260d40-6a40-406f-b8a9-7898ae8a3b16\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt"
Oct 13 13:17:11 crc kubenswrapper[4684]: I1013 13:17:11.407693 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trkbs\" (UniqueName: \"kubernetes.io/projected/03260d40-6a40-406f-b8a9-7898ae8a3b16-kube-api-access-trkbs\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt\" (UID: \"03260d40-6a40-406f-b8a9-7898ae8a3b16\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt"
Oct 13 13:17:11 crc kubenswrapper[4684]: I1013 13:17:11.458178 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt"
Oct 13 13:17:11 crc kubenswrapper[4684]: I1013 13:17:11.702674 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt"]
Oct 13 13:17:12 crc kubenswrapper[4684]: I1013 13:17:12.071697 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt" event={"ID":"03260d40-6a40-406f-b8a9-7898ae8a3b16","Type":"ContainerStarted","Data":"d351cbb55c1bd0a8418f71ff1518e06be4363ba22030a5b8534ca14f458251e3"}
Oct 13 13:17:12 crc kubenswrapper[4684]: I1013 13:17:12.071740 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt" event={"ID":"03260d40-6a40-406f-b8a9-7898ae8a3b16","Type":"ContainerStarted","Data":"298a22d7258fe79cc7260cd0d6782b3af4d49be4a5452cb02a353a6bdf97a4bb"}
Oct 13 13:17:13 crc kubenswrapper[4684]: I1013 13:17:13.081301 4684 generic.go:334] "Generic (PLEG): container finished" podID="03260d40-6a40-406f-b8a9-7898ae8a3b16" containerID="d351cbb55c1bd0a8418f71ff1518e06be4363ba22030a5b8534ca14f458251e3" exitCode=0
Oct 13 13:17:13 crc kubenswrapper[4684]: I1013 13:17:13.081367 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt" event={"ID":"03260d40-6a40-406f-b8a9-7898ae8a3b16","Type":"ContainerDied","Data":"d351cbb55c1bd0a8418f71ff1518e06be4363ba22030a5b8534ca14f458251e3"}
Oct 13 13:17:15 crc kubenswrapper[4684]: I1013 13:17:15.095269 4684 generic.go:334] "Generic (PLEG): container finished" podID="03260d40-6a40-406f-b8a9-7898ae8a3b16" containerID="016de455826b6d184ca5a16bd30bad35aa94dcae5c69223e94c02c90d5109bcb" exitCode=0
Oct 13 13:17:15 crc kubenswrapper[4684]: I1013 13:17:15.095343 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt" event={"ID":"03260d40-6a40-406f-b8a9-7898ae8a3b16","Type":"ContainerDied","Data":"016de455826b6d184ca5a16bd30bad35aa94dcae5c69223e94c02c90d5109bcb"}
Oct 13 13:17:16 crc kubenswrapper[4684]: I1013 13:17:16.103410 4684 generic.go:334] "Generic (PLEG): container finished" podID="03260d40-6a40-406f-b8a9-7898ae8a3b16" containerID="1b51012db5f0ed3548a82fda5d49163e3f0eaa450e614bf9a97cfa5200c239e1" exitCode=0
Oct 13 13:17:16 crc kubenswrapper[4684]: I1013 13:17:16.103452 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt" event={"ID":"03260d40-6a40-406f-b8a9-7898ae8a3b16","Type":"ContainerDied","Data":"1b51012db5f0ed3548a82fda5d49163e3f0eaa450e614bf9a97cfa5200c239e1"}
Oct 13 13:17:17 crc kubenswrapper[4684]: I1013 13:17:17.421943 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt"
Oct 13 13:17:17 crc kubenswrapper[4684]: I1013 13:17:17.489611 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/03260d40-6a40-406f-b8a9-7898ae8a3b16-util\") pod \"03260d40-6a40-406f-b8a9-7898ae8a3b16\" (UID: \"03260d40-6a40-406f-b8a9-7898ae8a3b16\") "
Oct 13 13:17:17 crc kubenswrapper[4684]: I1013 13:17:17.591044 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trkbs\" (UniqueName: \"kubernetes.io/projected/03260d40-6a40-406f-b8a9-7898ae8a3b16-kube-api-access-trkbs\") pod \"03260d40-6a40-406f-b8a9-7898ae8a3b16\" (UID: \"03260d40-6a40-406f-b8a9-7898ae8a3b16\") "
Oct 13 13:17:17 crc kubenswrapper[4684]: I1013 13:17:17.591385 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/03260d40-6a40-406f-b8a9-7898ae8a3b16-bundle\") pod \"03260d40-6a40-406f-b8a9-7898ae8a3b16\" (UID: \"03260d40-6a40-406f-b8a9-7898ae8a3b16\") "
Oct 13 13:17:17 crc kubenswrapper[4684]: I1013 13:17:17.592184 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03260d40-6a40-406f-b8a9-7898ae8a3b16-bundle" (OuterVolumeSpecName: "bundle") pod "03260d40-6a40-406f-b8a9-7898ae8a3b16" (UID: "03260d40-6a40-406f-b8a9-7898ae8a3b16"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:17:17 crc kubenswrapper[4684]: I1013 13:17:17.596623 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03260d40-6a40-406f-b8a9-7898ae8a3b16-kube-api-access-trkbs" (OuterVolumeSpecName: "kube-api-access-trkbs") pod "03260d40-6a40-406f-b8a9-7898ae8a3b16" (UID: "03260d40-6a40-406f-b8a9-7898ae8a3b16"). InnerVolumeSpecName "kube-api-access-trkbs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:17:17 crc kubenswrapper[4684]: I1013 13:17:17.601669 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03260d40-6a40-406f-b8a9-7898ae8a3b16-util" (OuterVolumeSpecName: "util") pod "03260d40-6a40-406f-b8a9-7898ae8a3b16" (UID: "03260d40-6a40-406f-b8a9-7898ae8a3b16"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:17:17 crc kubenswrapper[4684]: I1013 13:17:17.692508 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trkbs\" (UniqueName: \"kubernetes.io/projected/03260d40-6a40-406f-b8a9-7898ae8a3b16-kube-api-access-trkbs\") on node \"crc\" DevicePath \"\""
Oct 13 13:17:17 crc kubenswrapper[4684]: I1013 13:17:17.692555 4684 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/03260d40-6a40-406f-b8a9-7898ae8a3b16-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 13:17:17 crc kubenswrapper[4684]: I1013 13:17:17.692569 4684 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/03260d40-6a40-406f-b8a9-7898ae8a3b16-util\") on node \"crc\" DevicePath \"\""
Oct 13 13:17:18 crc kubenswrapper[4684]: I1013 13:17:18.119513 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt" event={"ID":"03260d40-6a40-406f-b8a9-7898ae8a3b16","Type":"ContainerDied","Data":"298a22d7258fe79cc7260cd0d6782b3af4d49be4a5452cb02a353a6bdf97a4bb"}
Oct 13 13:17:18 crc kubenswrapper[4684]: I1013 13:17:18.119578 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt"
Oct 13 13:17:18 crc kubenswrapper[4684]: I1013 13:17:18.119583 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="298a22d7258fe79cc7260cd0d6782b3af4d49be4a5452cb02a353a6bdf97a4bb"
Oct 13 13:17:20 crc kubenswrapper[4684]: I1013 13:17:20.167310 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-c68bl"]
Oct 13 13:17:20 crc kubenswrapper[4684]: E1013 13:17:20.167935 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03260d40-6a40-406f-b8a9-7898ae8a3b16" containerName="extract"
Oct 13 13:17:20 crc kubenswrapper[4684]: I1013 13:17:20.167951 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="03260d40-6a40-406f-b8a9-7898ae8a3b16" containerName="extract"
Oct 13 13:17:20 crc kubenswrapper[4684]: E1013 13:17:20.167962 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03260d40-6a40-406f-b8a9-7898ae8a3b16" containerName="pull"
Oct 13 13:17:20 crc kubenswrapper[4684]: I1013 13:17:20.167969 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="03260d40-6a40-406f-b8a9-7898ae8a3b16" containerName="pull"
Oct 13 13:17:20 crc kubenswrapper[4684]: E1013 13:17:20.167989 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03260d40-6a40-406f-b8a9-7898ae8a3b16" containerName="util"
Oct 13 13:17:20 crc kubenswrapper[4684]: I1013 13:17:20.167997 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="03260d40-6a40-406f-b8a9-7898ae8a3b16" containerName="util"
Oct 13 13:17:20 crc kubenswrapper[4684]: I1013 13:17:20.168109 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="03260d40-6a40-406f-b8a9-7898ae8a3b16" containerName="extract"
Oct 13 13:17:20 crc kubenswrapper[4684]: I1013 13:17:20.168554 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-c68bl"
Oct 13 13:17:20 crc kubenswrapper[4684]: I1013 13:17:20.171132 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-m6x54"
Oct 13 13:17:20 crc kubenswrapper[4684]: I1013 13:17:20.171216 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt"
Oct 13 13:17:20 crc kubenswrapper[4684]: I1013 13:17:20.172105 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt"
Oct 13 13:17:20 crc kubenswrapper[4684]: I1013 13:17:20.178063 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-c68bl"]
Oct 13 13:17:20 crc kubenswrapper[4684]: I1013 13:17:20.324244 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9frfm\" (UniqueName: \"kubernetes.io/projected/bf38af4f-4552-46e1-8011-0e8924331c2f-kube-api-access-9frfm\") pod \"nmstate-operator-858ddd8f98-c68bl\" (UID: \"bf38af4f-4552-46e1-8011-0e8924331c2f\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-c68bl"
Oct 13 13:17:20 crc kubenswrapper[4684]: I1013 13:17:20.425089 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9frfm\" (UniqueName: \"kubernetes.io/projected/bf38af4f-4552-46e1-8011-0e8924331c2f-kube-api-access-9frfm\") pod \"nmstate-operator-858ddd8f98-c68bl\" (UID: \"bf38af4f-4552-46e1-8011-0e8924331c2f\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-c68bl"
Oct 13 13:17:20 crc kubenswrapper[4684]: I1013 13:17:20.445752 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9frfm\" (UniqueName: \"kubernetes.io/projected/bf38af4f-4552-46e1-8011-0e8924331c2f-kube-api-access-9frfm\") pod \"nmstate-operator-858ddd8f98-c68bl\" (UID: \"bf38af4f-4552-46e1-8011-0e8924331c2f\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-c68bl"
Oct 13 13:17:20 crc kubenswrapper[4684]: I1013 13:17:20.490224 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-c68bl"
Oct 13 13:17:21 crc kubenswrapper[4684]: I1013 13:17:20.997522 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-c68bl"]
Oct 13 13:17:21 crc kubenswrapper[4684]: I1013 13:17:21.138370 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-c68bl" event={"ID":"bf38af4f-4552-46e1-8011-0e8924331c2f","Type":"ContainerStarted","Data":"dd0c7e4ccaf88b5e473d7258c35616a9c263f6f289e4c572cb9cb4a816ca8a9a"}
Oct 13 13:17:25 crc kubenswrapper[4684]: I1013 13:17:25.161663 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-c68bl" event={"ID":"bf38af4f-4552-46e1-8011-0e8924331c2f","Type":"ContainerStarted","Data":"6a6a87093a086f13d577a46637dca7c38fed695df7ad402cb19aa0485694c52d"}
Oct 13 13:17:25 crc kubenswrapper[4684]: I1013 13:17:25.189411 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-c68bl" podStartSLOduration=2.178444323 podStartE2EDuration="5.189390556s" podCreationTimestamp="2025-10-13 13:17:20 +0000 UTC" firstStartedPulling="2025-10-13 13:17:21.013102144 +0000 UTC m=+595.580486244" lastFinishedPulling="2025-10-13 13:17:24.024048407 +0000 UTC m=+598.591432477" observedRunningTime="2025-10-13 13:17:25.188447537 +0000 UTC m=+599.755831627" watchObservedRunningTime="2025-10-13 13:17:25.189390556 +0000 UTC m=+599.756774646"
Oct 13 13:17:26 crc kubenswrapper[4684]: I1013 13:17:26.690251 4684 scope.go:117] "RemoveContainer" containerID="d6b6e03a8d9eda8c526bd9b0c6d01d314d3dbaa9f9a9d5238dce1eac3f5c167c"
Oct 13 13:17:27 crc kubenswrapper[4684]: I1013 13:17:27.176665 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r7wd2_eb2c3381-fecf-46e7-a034-d3c560dff35e/kube-multus/2.log"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.232357 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-r2xpx"]
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.233291 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-r2xpx"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.236235 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-rjzqp"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.242169 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-hw69f"]
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.242880 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-hw69f"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.245372 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.248570 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-r2xpx"]
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.275917 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-hw69f"]
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.279627 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-xsv76"]
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.280321 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-xsv76"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.287312 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ks6p5\" (UniqueName: \"kubernetes.io/projected/aad74a49-543f-4f01-a171-0ca010ba7319-kube-api-access-ks6p5\") pod \"nmstate-metrics-fdff9cb8d-r2xpx\" (UID: \"aad74a49-543f-4f01-a171-0ca010ba7319\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-r2xpx"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.372721 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq"]
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.373391 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.380064 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-nfplp"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.380130 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.380438 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.389258 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/a6d3dd3f-7c9d-45b9-8f81-dd85f2554caa-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-hw69f\" (UID: \"a6d3dd3f-7c9d-45b9-8f81-dd85f2554caa\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-hw69f"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.389313 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/1fd2ac9e-de15-4598-a050-44f58f8e8a50-ovs-socket\") pod \"nmstate-handler-xsv76\" (UID: \"1fd2ac9e-de15-4598-a050-44f58f8e8a50\") " pod="openshift-nmstate/nmstate-handler-xsv76"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.389467 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/1fd2ac9e-de15-4598-a050-44f58f8e8a50-dbus-socket\") pod \"nmstate-handler-xsv76\" (UID: \"1fd2ac9e-de15-4598-a050-44f58f8e8a50\") " pod="openshift-nmstate/nmstate-handler-xsv76"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.389560 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n828p\" (UniqueName: \"kubernetes.io/projected/1fd2ac9e-de15-4598-a050-44f58f8e8a50-kube-api-access-n828p\") pod \"nmstate-handler-xsv76\" (UID: \"1fd2ac9e-de15-4598-a050-44f58f8e8a50\") " pod="openshift-nmstate/nmstate-handler-xsv76"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.389596 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rw86s\" (UniqueName: \"kubernetes.io/projected/a6d3dd3f-7c9d-45b9-8f81-dd85f2554caa-kube-api-access-rw86s\") pod \"nmstate-webhook-6cdbc54649-hw69f\" (UID: \"a6d3dd3f-7c9d-45b9-8f81-dd85f2554caa\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-hw69f"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.389620 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m7bh\" (UniqueName: \"kubernetes.io/projected/5608900f-c09f-4810-bde2-87588dadfe55-kube-api-access-2m7bh\") pod \"nmstate-console-plugin-6b874cbd85-vkcdq\" (UID: \"5608900f-c09f-4810-bde2-87588dadfe55\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.389705 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ks6p5\" (UniqueName: \"kubernetes.io/projected/aad74a49-543f-4f01-a171-0ca010ba7319-kube-api-access-ks6p5\") pod \"nmstate-metrics-fdff9cb8d-r2xpx\" (UID: \"aad74a49-543f-4f01-a171-0ca010ba7319\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-r2xpx"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.389795 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5608900f-c09f-4810-bde2-87588dadfe55-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-vkcdq\" (UID: \"5608900f-c09f-4810-bde2-87588dadfe55\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.389857 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/1fd2ac9e-de15-4598-a050-44f58f8e8a50-nmstate-lock\") pod \"nmstate-handler-xsv76\" (UID: \"1fd2ac9e-de15-4598-a050-44f58f8e8a50\") " pod="openshift-nmstate/nmstate-handler-xsv76"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.390065 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/5608900f-c09f-4810-bde2-87588dadfe55-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-vkcdq\" (UID: \"5608900f-c09f-4810-bde2-87588dadfe55\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.397457 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq"]
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.422345 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ks6p5\" (UniqueName: \"kubernetes.io/projected/aad74a49-543f-4f01-a171-0ca010ba7319-kube-api-access-ks6p5\") pod \"nmstate-metrics-fdff9cb8d-r2xpx\" (UID: \"aad74a49-543f-4f01-a171-0ca010ba7319\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-r2xpx"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.493545 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5608900f-c09f-4810-bde2-87588dadfe55-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-vkcdq\" (UID: \"5608900f-c09f-4810-bde2-87588dadfe55\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.494038 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/1fd2ac9e-de15-4598-a050-44f58f8e8a50-nmstate-lock\") pod \"nmstate-handler-xsv76\" (UID: \"1fd2ac9e-de15-4598-a050-44f58f8e8a50\") " pod="openshift-nmstate/nmstate-handler-xsv76"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.494254 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/1fd2ac9e-de15-4598-a050-44f58f8e8a50-nmstate-lock\") pod \"nmstate-handler-xsv76\" (UID: \"1fd2ac9e-de15-4598-a050-44f58f8e8a50\") " pod="openshift-nmstate/nmstate-handler-xsv76"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.494476 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/5608900f-c09f-4810-bde2-87588dadfe55-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-vkcdq\" (UID: \"5608900f-c09f-4810-bde2-87588dadfe55\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.494657 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/a6d3dd3f-7c9d-45b9-8f81-dd85f2554caa-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-hw69f\" (UID: \"a6d3dd3f-7c9d-45b9-8f81-dd85f2554caa\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-hw69f"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.495301 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/1fd2ac9e-de15-4598-a050-44f58f8e8a50-ovs-socket\") pod \"nmstate-handler-xsv76\" (UID: \"1fd2ac9e-de15-4598-a050-44f58f8e8a50\") " pod="openshift-nmstate/nmstate-handler-xsv76"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.495478 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/1fd2ac9e-de15-4598-a050-44f58f8e8a50-dbus-socket\") pod \"nmstate-handler-xsv76\" (UID: \"1fd2ac9e-de15-4598-a050-44f58f8e8a50\") " pod="openshift-nmstate/nmstate-handler-xsv76"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.495620 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n828p\" (UniqueName: \"kubernetes.io/projected/1fd2ac9e-de15-4598-a050-44f58f8e8a50-kube-api-access-n828p\") pod \"nmstate-handler-xsv76\" (UID: \"1fd2ac9e-de15-4598-a050-44f58f8e8a50\") " pod="openshift-nmstate/nmstate-handler-xsv76"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.495831 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rw86s\" (UniqueName: \"kubernetes.io/projected/a6d3dd3f-7c9d-45b9-8f81-dd85f2554caa-kube-api-access-rw86s\") pod \"nmstate-webhook-6cdbc54649-hw69f\" (UID: \"a6d3dd3f-7c9d-45b9-8f81-dd85f2554caa\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-hw69f"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.496533 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m7bh\" (UniqueName: \"kubernetes.io/projected/5608900f-c09f-4810-bde2-87588dadfe55-kube-api-access-2m7bh\") pod \"nmstate-console-plugin-6b874cbd85-vkcdq\" (UID: \"5608900f-c09f-4810-bde2-87588dadfe55\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.495882 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/1fd2ac9e-de15-4598-a050-44f58f8e8a50-ovs-socket\") pod \"nmstate-handler-xsv76\" (UID: \"1fd2ac9e-de15-4598-a050-44f58f8e8a50\") " pod="openshift-nmstate/nmstate-handler-xsv76"
Oct 13 13:17:31 crc kubenswrapper[4684]: E1013 13:17:31.494664 4684 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found
Oct 13 13:17:31 crc kubenswrapper[4684]: E1013 13:17:31.496879 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5608900f-c09f-4810-bde2-87588dadfe55-plugin-serving-cert podName:5608900f-c09f-4810-bde2-87588dadfe55 nodeName:}" failed. No retries permitted until 2025-10-13 13:17:31.996860636 +0000 UTC m=+606.564244706 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/5608900f-c09f-4810-bde2-87588dadfe55-plugin-serving-cert") pod "nmstate-console-plugin-6b874cbd85-vkcdq" (UID: "5608900f-c09f-4810-bde2-87588dadfe55") : secret "plugin-serving-cert" not found
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.495833 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5608900f-c09f-4810-bde2-87588dadfe55-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-vkcdq\" (UID: \"5608900f-c09f-4810-bde2-87588dadfe55\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.496156 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/1fd2ac9e-de15-4598-a050-44f58f8e8a50-dbus-socket\") pod \"nmstate-handler-xsv76\" (UID: \"1fd2ac9e-de15-4598-a050-44f58f8e8a50\") " pod="openshift-nmstate/nmstate-handler-xsv76"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.502783 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/a6d3dd3f-7c9d-45b9-8f81-dd85f2554caa-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-hw69f\" (UID: \"a6d3dd3f-7c9d-45b9-8f81-dd85f2554caa\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-hw69f"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.518571 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m7bh\" (UniqueName: \"kubernetes.io/projected/5608900f-c09f-4810-bde2-87588dadfe55-kube-api-access-2m7bh\") pod \"nmstate-console-plugin-6b874cbd85-vkcdq\" (UID: \"5608900f-c09f-4810-bde2-87588dadfe55\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.521019 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n828p\" (UniqueName: \"kubernetes.io/projected/1fd2ac9e-de15-4598-a050-44f58f8e8a50-kube-api-access-n828p\") pod \"nmstate-handler-xsv76\" (UID: \"1fd2ac9e-de15-4598-a050-44f58f8e8a50\") " pod="openshift-nmstate/nmstate-handler-xsv76"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.521358 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rw86s\" (UniqueName: \"kubernetes.io/projected/a6d3dd3f-7c9d-45b9-8f81-dd85f2554caa-kube-api-access-rw86s\") pod \"nmstate-webhook-6cdbc54649-hw69f\" (UID: \"a6d3dd3f-7c9d-45b9-8f81-dd85f2554caa\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-hw69f"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.559682 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-r2xpx"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.567438 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-hw69f"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.574995 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-59db76454-km6g6"]
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.576143 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.584828 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-59db76454-km6g6"]
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.599583 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8998ef4b-c505-48de-a166-02b6117b608a-console-oauth-config\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.599676 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8998ef4b-c505-48de-a166-02b6117b608a-oauth-serving-cert\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.599707 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trnx5\" (UniqueName: \"kubernetes.io/projected/8998ef4b-c505-48de-a166-02b6117b608a-kube-api-access-trnx5\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.599729 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8998ef4b-c505-48de-a166-02b6117b608a-service-ca\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.599753 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8998ef4b-c505-48de-a166-02b6117b608a-console-serving-cert\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.599797 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8998ef4b-c505-48de-a166-02b6117b608a-trusted-ca-bundle\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.599835 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8998ef4b-c505-48de-a166-02b6117b608a-console-config\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.600585 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-xsv76"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.701554 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8998ef4b-c505-48de-a166-02b6117b608a-oauth-serving-cert\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.701641 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trnx5\" (UniqueName: \"kubernetes.io/projected/8998ef4b-c505-48de-a166-02b6117b608a-kube-api-access-trnx5\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.701721 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8998ef4b-c505-48de-a166-02b6117b608a-service-ca\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.701756 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8998ef4b-c505-48de-a166-02b6117b608a-console-serving-cert\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.701825 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8998ef4b-c505-48de-a166-02b6117b608a-trusted-ca-bundle\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.701888 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8998ef4b-c505-48de-a166-02b6117b608a-console-config\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.701962 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8998ef4b-c505-48de-a166-02b6117b608a-console-oauth-config\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.706812 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8998ef4b-c505-48de-a166-02b6117b608a-service-ca\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.711326 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8998ef4b-c505-48de-a166-02b6117b608a-trusted-ca-bundle\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.711448 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8998ef4b-c505-48de-a166-02b6117b608a-console-config\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.715557 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8998ef4b-c505-48de-a166-02b6117b608a-console-oauth-config\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.715991 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8998ef4b-c505-48de-a166-02b6117b608a-console-serving-cert\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.720239 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8998ef4b-c505-48de-a166-02b6117b608a-oauth-serving-cert\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.729839 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trnx5\" (UniqueName: \"kubernetes.io/projected/8998ef4b-c505-48de-a166-02b6117b608a-kube-api-access-trnx5\") pod \"console-59db76454-km6g6\" (UID: \"8998ef4b-c505-48de-a166-02b6117b608a\") " pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.786653 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-r2xpx"]
Oct 13 13:17:31 crc kubenswrapper[4684]: W1013 13:17:31.797084 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaad74a49_543f_4f01_a171_0ca010ba7319.slice/crio-c550445501ad6aa6e2dde7d22d131b20942a2d8510ee9a978dc53e34f894d936 WatchSource:0}: Error finding container c550445501ad6aa6e2dde7d22d131b20942a2d8510ee9a978dc53e34f894d936: Status 404 returned error can't find the container with id c550445501ad6aa6e2dde7d22d131b20942a2d8510ee9a978dc53e34f894d936
Oct 13 13:17:31 crc kubenswrapper[4684]: I1013 13:17:31.947343 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:32 crc kubenswrapper[4684]: I1013 13:17:32.007183 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/5608900f-c09f-4810-bde2-87588dadfe55-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-vkcdq\" (UID: \"5608900f-c09f-4810-bde2-87588dadfe55\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq"
Oct 13 13:17:32 crc kubenswrapper[4684]: I1013 13:17:32.011850 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/5608900f-c09f-4810-bde2-87588dadfe55-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-vkcdq\" (UID: \"5608900f-c09f-4810-bde2-87588dadfe55\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq"
Oct 13 13:17:32 crc kubenswrapper[4684]: I1013 13:17:32.027536 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-hw69f"]
Oct 13 13:17:32 crc kubenswrapper[4684]: W1013 13:17:32.033679 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6d3dd3f_7c9d_45b9_8f81_dd85f2554caa.slice/crio-b2d235ff10659f69b4084505fd29d0f4854c9e4f9fe6bd08365a04af183f197e WatchSource:0}: Error finding container b2d235ff10659f69b4084505fd29d0f4854c9e4f9fe6bd08365a04af183f197e: Status 404 returned error can't find the container with id b2d235ff10659f69b4084505fd29d0f4854c9e4f9fe6bd08365a04af183f197e
Oct 13 13:17:32 crc kubenswrapper[4684]: I1013 13:17:32.185461 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-59db76454-km6g6"]
Oct 13 13:17:32 crc kubenswrapper[4684]: W1013 13:17:32.193706 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8998ef4b_c505_48de_a166_02b6117b608a.slice/crio-5b3789a24f473269ef51e342599945f1f89dd28c30e0d8a2adc0fc91edac2025 WatchSource:0}: Error finding container 5b3789a24f473269ef51e342599945f1f89dd28c30e0d8a2adc0fc91edac2025: Status 404 returned error can't find the container with id 5b3789a24f473269ef51e342599945f1f89dd28c30e0d8a2adc0fc91edac2025
Oct 13 13:17:32 crc kubenswrapper[4684]: I1013 13:17:32.205552 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-r2xpx" event={"ID":"aad74a49-543f-4f01-a171-0ca010ba7319","Type":"ContainerStarted","Data":"c550445501ad6aa6e2dde7d22d131b20942a2d8510ee9a978dc53e34f894d936"}
Oct 13 13:17:32 crc kubenswrapper[4684]: I1013 13:17:32.206996 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-hw69f" event={"ID":"a6d3dd3f-7c9d-45b9-8f81-dd85f2554caa","Type":"ContainerStarted","Data":"b2d235ff10659f69b4084505fd29d0f4854c9e4f9fe6bd08365a04af183f197e"}
Oct 13 13:17:32 crc kubenswrapper[4684]: I1013 13:17:32.208244 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-59db76454-km6g6" event={"ID":"8998ef4b-c505-48de-a166-02b6117b608a","Type":"ContainerStarted","Data":"5b3789a24f473269ef51e342599945f1f89dd28c30e0d8a2adc0fc91edac2025"}
Oct 13 13:17:32 crc kubenswrapper[4684]: I1013 13:17:32.209776 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-xsv76" event={"ID":"1fd2ac9e-de15-4598-a050-44f58f8e8a50","Type":"ContainerStarted","Data":"0802ebb5cac50adb25ec0c70a47778b7e03d02057badf2bde6de68d4a5c37c06"}
Oct 13 13:17:32 crc kubenswrapper[4684]: I1013 13:17:32.293235 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq"
Oct 13 13:17:32 crc kubenswrapper[4684]: I1013 13:17:32.480232 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq"]
Oct 13 13:17:32 crc kubenswrapper[4684]: W1013 13:17:32.489752 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5608900f_c09f_4810_bde2_87588dadfe55.slice/crio-20604391074da2d680531462933bed11c0585077f02fec9d01d53c9886b88673 WatchSource:0}: Error finding container 20604391074da2d680531462933bed11c0585077f02fec9d01d53c9886b88673: Status 404 returned error can't find the container with id 20604391074da2d680531462933bed11c0585077f02fec9d01d53c9886b88673
Oct 13 13:17:33 crc kubenswrapper[4684]: I1013 13:17:33.215132 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-59db76454-km6g6" event={"ID":"8998ef4b-c505-48de-a166-02b6117b608a","Type":"ContainerStarted","Data":"8d5528e4df62170d4ac8c81dbfcec6082976a7e27ae8edac071856a1957840cc"}
Oct 13 13:17:33 crc kubenswrapper[4684]: I1013 13:17:33.216103 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq" event={"ID":"5608900f-c09f-4810-bde2-87588dadfe55","Type":"ContainerStarted","Data":"20604391074da2d680531462933bed11c0585077f02fec9d01d53c9886b88673"}
Oct 13 13:17:33 crc kubenswrapper[4684]: I1013 13:17:33.231474 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-59db76454-km6g6" podStartSLOduration=2.231454925 podStartE2EDuration="2.231454925s" podCreationTimestamp="2025-10-13 13:17:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:17:33.229779032 +0000 UTC m=+607.797163122" watchObservedRunningTime="2025-10-13 13:17:33.231454925 +0000 UTC m=+607.798839005"
Oct 13 13:17:35 crc kubenswrapper[4684]: I1013 13:17:35.232961 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq" event={"ID":"5608900f-c09f-4810-bde2-87588dadfe55","Type":"ContainerStarted","Data":"ade04bb782c59382005c48fabe16de3444edd7b19ccc5fb824a64f584679d9df"}
Oct 13 13:17:35 crc kubenswrapper[4684]: I1013 13:17:35.237244 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-r2xpx" event={"ID":"aad74a49-543f-4f01-a171-0ca010ba7319","Type":"ContainerStarted","Data":"16bbf7d569013c6802b5fbc8fc716071fa38fef93889a7a4beba59b7dcc61b0e"}
Oct 13 13:17:35 crc kubenswrapper[4684]: I1013 13:17:35.239219 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-hw69f" event={"ID":"a6d3dd3f-7c9d-45b9-8f81-dd85f2554caa","Type":"ContainerStarted","Data":"a4ff6f6c930ad166e26c5d1899fc192ff1b433355e0e77eae392bb9ea81f179b"}
Oct 13 13:17:36 crc kubenswrapper[4684]: I1013 13:17:36.251000 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-xsv76" event={"ID":"1fd2ac9e-de15-4598-a050-44f58f8e8a50","Type":"ContainerStarted","Data":"d76b584170799e534021f16cbcda18553afd9090338ced32cfae3fec1aef1963"}
Oct 13 13:17:36 crc kubenswrapper[4684]: I1013 13:17:36.251487 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-hw69f"
Oct 13 13:17:36 crc kubenswrapper[4684]: I1013 13:17:36.251529 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-xsv76"
Oct 13 13:17:36 crc kubenswrapper[4684]: I1013 13:17:36.274960 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-vkcdq" podStartSLOduration=2.789700977 podStartE2EDuration="5.274936909s" podCreationTimestamp="2025-10-13 13:17:31 +0000 UTC" firstStartedPulling="2025-10-13 13:17:32.492716218 +0000 UTC m=+607.060100288" lastFinishedPulling="2025-10-13 13:17:34.97795211 +0000 UTC m=+609.545336220" observedRunningTime="2025-10-13 13:17:36.270168697 +0000 UTC m=+610.837552767" watchObservedRunningTime="2025-10-13 13:17:36.274936909 +0000 UTC m=+610.842320989"
Oct 13 13:17:36 crc kubenswrapper[4684]: I1013 13:17:36.292227 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-xsv76" podStartSLOduration=1.967172745 podStartE2EDuration="5.292200805s" podCreationTimestamp="2025-10-13 13:17:31 +0000 UTC" firstStartedPulling="2025-10-13 13:17:31.652824647 +0000 UTC m=+606.220208717" lastFinishedPulling="2025-10-13 13:17:34.977852667 +0000 UTC m=+609.545236777" observedRunningTime="2025-10-13 13:17:36.288513969 +0000 UTC m=+610.855898079" watchObservedRunningTime="2025-10-13 13:17:36.292200805 +0000 UTC m=+610.859584915"
Oct 13 13:17:36 crc kubenswrapper[4684]: I1013 13:17:36.314193 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-hw69f" podStartSLOduration=2.359709058 podStartE2EDuration="5.314168302s" podCreationTimestamp="2025-10-13 13:17:31 +0000 UTC" firstStartedPulling="2025-10-13 13:17:32.038745259 +0000 UTC m=+606.606129329" lastFinishedPulling="2025-10-13 13:17:34.993204503 +0000 UTC m=+609.560588573" observedRunningTime="2025-10-13 13:17:36.313054826 +0000 UTC m=+610.880438986" watchObservedRunningTime="2025-10-13 13:17:36.314168302 +0000 UTC m=+610.881552402"
Oct 13 13:17:38 crc kubenswrapper[4684]: I1013 13:17:38.266091 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-r2xpx" event={"ID":"aad74a49-543f-4f01-a171-0ca010ba7319","Type":"ContainerStarted","Data":"eb217b1c89c5a90857b727c974f0b676505d1f9ddef84cf0b5be9e324cfa53fe"}
Oct 13 13:17:38 crc kubenswrapper[4684]: I1013 13:17:38.294807 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-r2xpx" podStartSLOduration=1.766929822 podStartE2EDuration="7.294777781s" podCreationTimestamp="2025-10-13 13:17:31 +0000 UTC" firstStartedPulling="2025-10-13 13:17:31.800858365 +0000 UTC m=+606.368242435" lastFinishedPulling="2025-10-13 13:17:37.328706294 +0000 UTC m=+611.896090394" observedRunningTime="2025-10-13 13:17:38.28966122 +0000 UTC m=+612.857045330" watchObservedRunningTime="2025-10-13 13:17:38.294777781 +0000 UTC m=+612.862161871"
Oct 13 13:17:41 crc kubenswrapper[4684]: I1013 13:17:41.623592 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-xsv76"
Oct 13 13:17:41 crc kubenswrapper[4684]: I1013 13:17:41.948267 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:41 crc kubenswrapper[4684]: I1013 13:17:41.948460 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:41 crc kubenswrapper[4684]: I1013 13:17:41.955615 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:42 crc kubenswrapper[4684]: I1013 13:17:42.297817 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-59db76454-km6g6"
Oct 13 13:17:42 crc kubenswrapper[4684]: I1013 13:17:42.373023 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-8vg72"]
Oct 13 13:17:51 crc kubenswrapper[4684]: I1013 13:17:51.576096 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-hw69f"
Oct 13 13:18:04 crc kubenswrapper[4684]: I1013 13:18:04.258392 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks"]
Oct 13 13:18:04 crc kubenswrapper[4684]: I1013 13:18:04.259969 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks"
Oct 13 13:18:04 crc kubenswrapper[4684]: I1013 13:18:04.261525 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Oct 13 13:18:04 crc kubenswrapper[4684]: I1013 13:18:04.270394 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks"]
Oct 13 13:18:04 crc kubenswrapper[4684]: I1013 13:18:04.375103 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bcc2f7fe-8648-4e50-946b-b0792d150f63-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks\" (UID: \"bcc2f7fe-8648-4e50-946b-b0792d150f63\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks"
Oct 13 13:18:04 crc kubenswrapper[4684]: I1013 13:18:04.375181 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bcc2f7fe-8648-4e50-946b-b0792d150f63-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks\" (UID: \"bcc2f7fe-8648-4e50-946b-b0792d150f63\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks"
Oct 13 13:18:04 crc kubenswrapper[4684]: I1013 13:18:04.375206 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlkcl\" (UniqueName: \"kubernetes.io/projected/bcc2f7fe-8648-4e50-946b-b0792d150f63-kube-api-access-hlkcl\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks\" (UID: \"bcc2f7fe-8648-4e50-946b-b0792d150f63\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks"
Oct 13 13:18:04 crc kubenswrapper[4684]: I1013 13:18:04.476501 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bcc2f7fe-8648-4e50-946b-b0792d150f63-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks\" (UID: \"bcc2f7fe-8648-4e50-946b-b0792d150f63\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks"
Oct 13 13:18:04 crc kubenswrapper[4684]: I1013 13:18:04.476547 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlkcl\" (UniqueName: \"kubernetes.io/projected/bcc2f7fe-8648-4e50-946b-b0792d150f63-kube-api-access-hlkcl\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks\" (UID: \"bcc2f7fe-8648-4e50-946b-b0792d150f63\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks"
Oct 13 13:18:04 crc kubenswrapper[4684]: I1013 13:18:04.476623 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bcc2f7fe-8648-4e50-946b-b0792d150f63-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks\" (UID: \"bcc2f7fe-8648-4e50-946b-b0792d150f63\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks"
Oct 13 13:18:04 crc kubenswrapper[4684]: I1013 13:18:04.477259 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bcc2f7fe-8648-4e50-946b-b0792d150f63-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks\" (UID: \"bcc2f7fe-8648-4e50-946b-b0792d150f63\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks"
Oct 13 13:18:04 crc kubenswrapper[4684]: I1013 13:18:04.477286 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bcc2f7fe-8648-4e50-946b-b0792d150f63-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks\" (UID: \"bcc2f7fe-8648-4e50-946b-b0792d150f63\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks"
Oct 13 13:18:04 crc kubenswrapper[4684]: I1013 13:18:04.500577 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlkcl\" (UniqueName: \"kubernetes.io/projected/bcc2f7fe-8648-4e50-946b-b0792d150f63-kube-api-access-hlkcl\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks\" (UID: \"bcc2f7fe-8648-4e50-946b-b0792d150f63\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks"
Oct 13 13:18:04 crc kubenswrapper[4684]: I1013 13:18:04.585980 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks"
Oct 13 13:18:04 crc kubenswrapper[4684]: I1013 13:18:04.784329 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks"]
Oct 13 13:18:05 crc kubenswrapper[4684]: I1013 13:18:05.433655 4684 generic.go:334] "Generic (PLEG): container finished" podID="bcc2f7fe-8648-4e50-946b-b0792d150f63" containerID="2cd2a95f4f1ca30498969664d393012bd07046b70d84d49a8003b880129c7634" exitCode=0
Oct 13 13:18:05 crc kubenswrapper[4684]: I1013 13:18:05.433709 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks" event={"ID":"bcc2f7fe-8648-4e50-946b-b0792d150f63","Type":"ContainerDied","Data":"2cd2a95f4f1ca30498969664d393012bd07046b70d84d49a8003b880129c7634"}
Oct 13 13:18:05 crc kubenswrapper[4684]: I1013 13:18:05.433735 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks" event={"ID":"bcc2f7fe-8648-4e50-946b-b0792d150f63","Type":"ContainerStarted","Data":"37e40fd57e1d5b1f8756ebb146e2b02ba07d38b3b2f6da4ea397bfdff14cec96"}
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.422816 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-8vg72" podUID="4697906b-fe4e-4a08-a82c-3a5fb0129fc9" containerName="console" containerID="cri-o://234420267cd04430bdc52db2f3a2e7497d2ad34294f8ddc4a312ac25a68bc38f" gracePeriod=15
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.452878 4684 generic.go:334] "Generic (PLEG): container finished" podID="bcc2f7fe-8648-4e50-946b-b0792d150f63" containerID="2d9858cf4d34a60dd2139ea079075049fae1e2819c57aa874c1079a86174c10f" exitCode=0
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.452954 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks" event={"ID":"bcc2f7fe-8648-4e50-946b-b0792d150f63","Type":"ContainerDied","Data":"2d9858cf4d34a60dd2139ea079075049fae1e2819c57aa874c1079a86174c10f"}
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.799095 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-8vg72_4697906b-fe4e-4a08-a82c-3a5fb0129fc9/console/0.log"
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.799150 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-8vg72"
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.918427 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-oauth-config\") pod \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") "
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.918474 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlsc2\" (UniqueName: \"kubernetes.io/projected/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-kube-api-access-rlsc2\") pod \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") "
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.918493 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-oauth-serving-cert\") pod \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") "
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.918530 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-config\") pod \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") "
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.918576 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-service-ca\") pod \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") "
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.918594 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-trusted-ca-bundle\") pod \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") "
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.918607 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-serving-cert\") pod \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\" (UID: \"4697906b-fe4e-4a08-a82c-3a5fb0129fc9\") "
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.919893 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "4697906b-fe4e-4a08-a82c-3a5fb0129fc9" (UID: "4697906b-fe4e-4a08-a82c-3a5fb0129fc9"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.920041 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-config" (OuterVolumeSpecName: "console-config") pod "4697906b-fe4e-4a08-a82c-3a5fb0129fc9" (UID: "4697906b-fe4e-4a08-a82c-3a5fb0129fc9"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.920566 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-service-ca" (OuterVolumeSpecName: "service-ca") pod "4697906b-fe4e-4a08-a82c-3a5fb0129fc9" (UID: "4697906b-fe4e-4a08-a82c-3a5fb0129fc9"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.920700 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "4697906b-fe4e-4a08-a82c-3a5fb0129fc9" (UID: "4697906b-fe4e-4a08-a82c-3a5fb0129fc9"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.923714 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-kube-api-access-rlsc2" (OuterVolumeSpecName: "kube-api-access-rlsc2") pod "4697906b-fe4e-4a08-a82c-3a5fb0129fc9" (UID: "4697906b-fe4e-4a08-a82c-3a5fb0129fc9"). InnerVolumeSpecName "kube-api-access-rlsc2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.924345 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "4697906b-fe4e-4a08-a82c-3a5fb0129fc9" (UID: "4697906b-fe4e-4a08-a82c-3a5fb0129fc9"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:18:07 crc kubenswrapper[4684]: I1013 13:18:07.925075 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "4697906b-fe4e-4a08-a82c-3a5fb0129fc9" (UID: "4697906b-fe4e-4a08-a82c-3a5fb0129fc9"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.019589 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlsc2\" (UniqueName: \"kubernetes.io/projected/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-kube-api-access-rlsc2\") on node \"crc\" DevicePath \"\""
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.019646 4684 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.019665 4684 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-config\") on node \"crc\" DevicePath \"\""
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.019685 4684 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-service-ca\") on node \"crc\" DevicePath \"\""
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.019702 4684 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.019719 4684 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.019737 4684 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4697906b-fe4e-4a08-a82c-3a5fb0129fc9-console-oauth-config\") on node \"crc\" DevicePath \"\""
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.463010 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-8vg72_4697906b-fe4e-4a08-a82c-3a5fb0129fc9/console/0.log"
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.463100 4684 generic.go:334] "Generic (PLEG): container finished" podID="4697906b-fe4e-4a08-a82c-3a5fb0129fc9" containerID="234420267cd04430bdc52db2f3a2e7497d2ad34294f8ddc4a312ac25a68bc38f" exitCode=2
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.463219 4684 util.go:48] "No ready sandbox for pod can be found.
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.463264 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8vg72" event={"ID":"4697906b-fe4e-4a08-a82c-3a5fb0129fc9","Type":"ContainerDied","Data":"234420267cd04430bdc52db2f3a2e7497d2ad34294f8ddc4a312ac25a68bc38f"}
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.463319 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8vg72" event={"ID":"4697906b-fe4e-4a08-a82c-3a5fb0129fc9","Type":"ContainerDied","Data":"44fcaccf9975bc34d273512dc5fff932436e5f774c79cf899cdd6ca9e709bbe4"}
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.463368 4684 scope.go:117] "RemoveContainer" containerID="234420267cd04430bdc52db2f3a2e7497d2ad34294f8ddc4a312ac25a68bc38f"
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.468320 4684 generic.go:334] "Generic (PLEG): container finished" podID="bcc2f7fe-8648-4e50-946b-b0792d150f63" containerID="a4b6842053380ad83689b85b483969ba00511b6020acc722fec1aac8ba1d18cd" exitCode=0
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.468392 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks" event={"ID":"bcc2f7fe-8648-4e50-946b-b0792d150f63","Type":"ContainerDied","Data":"a4b6842053380ad83689b85b483969ba00511b6020acc722fec1aac8ba1d18cd"}
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.479990 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-8vg72"]
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.486818 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-8vg72"]
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.493102 4684 scope.go:117] "RemoveContainer" containerID="234420267cd04430bdc52db2f3a2e7497d2ad34294f8ddc4a312ac25a68bc38f"
Oct 13 13:18:08 crc kubenswrapper[4684]: E1013 13:18:08.493596 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"234420267cd04430bdc52db2f3a2e7497d2ad34294f8ddc4a312ac25a68bc38f\": container with ID starting with 234420267cd04430bdc52db2f3a2e7497d2ad34294f8ddc4a312ac25a68bc38f not found: ID does not exist" containerID="234420267cd04430bdc52db2f3a2e7497d2ad34294f8ddc4a312ac25a68bc38f"
Oct 13 13:18:08 crc kubenswrapper[4684]: I1013 13:18:08.493640 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"234420267cd04430bdc52db2f3a2e7497d2ad34294f8ddc4a312ac25a68bc38f"} err="failed to get container status \"234420267cd04430bdc52db2f3a2e7497d2ad34294f8ddc4a312ac25a68bc38f\": rpc error: code = NotFound desc = could not find container \"234420267cd04430bdc52db2f3a2e7497d2ad34294f8ddc4a312ac25a68bc38f\": container with ID starting with 234420267cd04430bdc52db2f3a2e7497d2ad34294f8ddc4a312ac25a68bc38f not found: ID does not exist"
Oct 13 13:18:09 crc kubenswrapper[4684]: I1013 13:18:09.692983 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks"
Oct 13 13:18:09 crc kubenswrapper[4684]: I1013 13:18:09.740353 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlkcl\" (UniqueName: \"kubernetes.io/projected/bcc2f7fe-8648-4e50-946b-b0792d150f63-kube-api-access-hlkcl\") pod \"bcc2f7fe-8648-4e50-946b-b0792d150f63\" (UID: \"bcc2f7fe-8648-4e50-946b-b0792d150f63\") "
Oct 13 13:18:09 crc kubenswrapper[4684]: I1013 13:18:09.740438 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bcc2f7fe-8648-4e50-946b-b0792d150f63-bundle\") pod \"bcc2f7fe-8648-4e50-946b-b0792d150f63\" (UID: \"bcc2f7fe-8648-4e50-946b-b0792d150f63\") "
Oct 13 13:18:09 crc kubenswrapper[4684]: I1013 13:18:09.740462 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bcc2f7fe-8648-4e50-946b-b0792d150f63-util\") pod \"bcc2f7fe-8648-4e50-946b-b0792d150f63\" (UID: \"bcc2f7fe-8648-4e50-946b-b0792d150f63\") "
Oct 13 13:18:09 crc kubenswrapper[4684]: I1013 13:18:09.741317 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcc2f7fe-8648-4e50-946b-b0792d150f63-bundle" (OuterVolumeSpecName: "bundle") pod "bcc2f7fe-8648-4e50-946b-b0792d150f63" (UID: "bcc2f7fe-8648-4e50-946b-b0792d150f63"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:18:09 crc kubenswrapper[4684]: I1013 13:18:09.749269 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcc2f7fe-8648-4e50-946b-b0792d150f63-kube-api-access-hlkcl" (OuterVolumeSpecName: "kube-api-access-hlkcl") pod "bcc2f7fe-8648-4e50-946b-b0792d150f63" (UID: "bcc2f7fe-8648-4e50-946b-b0792d150f63"). InnerVolumeSpecName "kube-api-access-hlkcl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:18:09 crc kubenswrapper[4684]: I1013 13:18:09.754589 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcc2f7fe-8648-4e50-946b-b0792d150f63-util" (OuterVolumeSpecName: "util") pod "bcc2f7fe-8648-4e50-946b-b0792d150f63" (UID: "bcc2f7fe-8648-4e50-946b-b0792d150f63"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:18:09 crc kubenswrapper[4684]: I1013 13:18:09.841820 4684 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bcc2f7fe-8648-4e50-946b-b0792d150f63-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 13:18:09 crc kubenswrapper[4684]: I1013 13:18:09.841875 4684 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bcc2f7fe-8648-4e50-946b-b0792d150f63-util\") on node \"crc\" DevicePath \"\""
Oct 13 13:18:09 crc kubenswrapper[4684]: I1013 13:18:09.841897 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlkcl\" (UniqueName: \"kubernetes.io/projected/bcc2f7fe-8648-4e50-946b-b0792d150f63-kube-api-access-hlkcl\") on node \"crc\" DevicePath \"\""
Oct 13 13:18:10 crc kubenswrapper[4684]: I1013 13:18:10.367048 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4697906b-fe4e-4a08-a82c-3a5fb0129fc9" path="/var/lib/kubelet/pods/4697906b-fe4e-4a08-a82c-3a5fb0129fc9/volumes"
Oct 13 13:18:10 crc kubenswrapper[4684]: I1013 13:18:10.494084 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks" event={"ID":"bcc2f7fe-8648-4e50-946b-b0792d150f63","Type":"ContainerDied","Data":"37e40fd57e1d5b1f8756ebb146e2b02ba07d38b3b2f6da4ea397bfdff14cec96"}
Oct 13 13:18:10 crc kubenswrapper[4684]: I1013 13:18:10.494148 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37e40fd57e1d5b1f8756ebb146e2b02ba07d38b3b2f6da4ea397bfdff14cec96"
Oct 13 13:18:10 crc kubenswrapper[4684]: I1013 13:18:10.494181 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.340127 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2"]
Oct 13 13:18:18 crc kubenswrapper[4684]: E1013 13:18:18.340997 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc2f7fe-8648-4e50-946b-b0792d150f63" containerName="util"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.341016 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc2f7fe-8648-4e50-946b-b0792d150f63" containerName="util"
Oct 13 13:18:18 crc kubenswrapper[4684]: E1013 13:18:18.341032 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc2f7fe-8648-4e50-946b-b0792d150f63" containerName="pull"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.341040 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc2f7fe-8648-4e50-946b-b0792d150f63" containerName="pull"
Oct 13 13:18:18 crc kubenswrapper[4684]: E1013 13:18:18.341055 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4697906b-fe4e-4a08-a82c-3a5fb0129fc9" containerName="console"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.341063 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="4697906b-fe4e-4a08-a82c-3a5fb0129fc9" containerName="console"
Oct 13 13:18:18 crc kubenswrapper[4684]: E1013 13:18:18.341077 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc2f7fe-8648-4e50-946b-b0792d150f63" containerName="extract"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.341084 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc2f7fe-8648-4e50-946b-b0792d150f63" containerName="extract"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.341211 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc2f7fe-8648-4e50-946b-b0792d150f63" containerName="extract"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.341224 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="4697906b-fe4e-4a08-a82c-3a5fb0129fc9" containerName="console"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.341692 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.346203 4684 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.346644 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.346801 4684 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-s4d9h"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.348587 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.348847 4684 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.425302 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2"]
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.455271 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfdnd\" (UniqueName: \"kubernetes.io/projected/c231fd74-600a-46ff-ba30-605a9445b002-kube-api-access-jfdnd\") pod \"metallb-operator-controller-manager-745c4c95f5-j54v2\" (UID: \"c231fd74-600a-46ff-ba30-605a9445b002\") " pod="metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.455316 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c231fd74-600a-46ff-ba30-605a9445b002-webhook-cert\") pod \"metallb-operator-controller-manager-745c4c95f5-j54v2\" (UID: \"c231fd74-600a-46ff-ba30-605a9445b002\") " pod="metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.455357 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c231fd74-600a-46ff-ba30-605a9445b002-apiservice-cert\") pod \"metallb-operator-controller-manager-745c4c95f5-j54v2\" (UID: \"c231fd74-600a-46ff-ba30-605a9445b002\") " pod="metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.556326 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfdnd\" (UniqueName: \"kubernetes.io/projected/c231fd74-600a-46ff-ba30-605a9445b002-kube-api-access-jfdnd\") pod \"metallb-operator-controller-manager-745c4c95f5-j54v2\" (UID: \"c231fd74-600a-46ff-ba30-605a9445b002\") " pod="metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.556379 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c231fd74-600a-46ff-ba30-605a9445b002-webhook-cert\") pod \"metallb-operator-controller-manager-745c4c95f5-j54v2\" (UID: \"c231fd74-600a-46ff-ba30-605a9445b002\") " pod="metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.556417 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c231fd74-600a-46ff-ba30-605a9445b002-apiservice-cert\") pod \"metallb-operator-controller-manager-745c4c95f5-j54v2\" (UID: \"c231fd74-600a-46ff-ba30-605a9445b002\") " pod="metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.564300 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c231fd74-600a-46ff-ba30-605a9445b002-apiservice-cert\") pod \"metallb-operator-controller-manager-745c4c95f5-j54v2\" (UID: \"c231fd74-600a-46ff-ba30-605a9445b002\") " pod="metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.565441 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c231fd74-600a-46ff-ba30-605a9445b002-webhook-cert\") pod \"metallb-operator-controller-manager-745c4c95f5-j54v2\" (UID: \"c231fd74-600a-46ff-ba30-605a9445b002\") " pod="metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.572659 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfdnd\" (UniqueName: \"kubernetes.io/projected/c231fd74-600a-46ff-ba30-605a9445b002-kube-api-access-jfdnd\") pod \"metallb-operator-controller-manager-745c4c95f5-j54v2\" (UID: \"c231fd74-600a-46ff-ba30-605a9445b002\") " pod="metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.645409 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8"]
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.646081 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.647827 4684 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.650397 4684 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.651699 4684 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-6t8ms"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.662244 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8"]
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.663838 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.759502 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b2944a80-992b-4799-a461-82f7c2398295-webhook-cert\") pod \"metallb-operator-webhook-server-6cbcf8c94c-hkxm8\" (UID: \"b2944a80-992b-4799-a461-82f7c2398295\") " pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.759593 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4sp9\" (UniqueName: \"kubernetes.io/projected/b2944a80-992b-4799-a461-82f7c2398295-kube-api-access-z4sp9\") pod \"metallb-operator-webhook-server-6cbcf8c94c-hkxm8\" (UID: \"b2944a80-992b-4799-a461-82f7c2398295\") " pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.759625 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b2944a80-992b-4799-a461-82f7c2398295-apiservice-cert\") pod \"metallb-operator-webhook-server-6cbcf8c94c-hkxm8\" (UID: \"b2944a80-992b-4799-a461-82f7c2398295\") " pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.862257 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b2944a80-992b-4799-a461-82f7c2398295-apiservice-cert\") pod \"metallb-operator-webhook-server-6cbcf8c94c-hkxm8\" (UID: \"b2944a80-992b-4799-a461-82f7c2398295\") " pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.862560 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b2944a80-992b-4799-a461-82f7c2398295-webhook-cert\") pod \"metallb-operator-webhook-server-6cbcf8c94c-hkxm8\" (UID: \"b2944a80-992b-4799-a461-82f7c2398295\") " pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.862943 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4sp9\" (UniqueName: \"kubernetes.io/projected/b2944a80-992b-4799-a461-82f7c2398295-kube-api-access-z4sp9\") pod \"metallb-operator-webhook-server-6cbcf8c94c-hkxm8\" (UID: \"b2944a80-992b-4799-a461-82f7c2398295\") " pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.887716 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b2944a80-992b-4799-a461-82f7c2398295-apiservice-cert\") pod \"metallb-operator-webhook-server-6cbcf8c94c-hkxm8\" (UID: \"b2944a80-992b-4799-a461-82f7c2398295\") " pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.888220 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b2944a80-992b-4799-a461-82f7c2398295-webhook-cert\") pod \"metallb-operator-webhook-server-6cbcf8c94c-hkxm8\" (UID: \"b2944a80-992b-4799-a461-82f7c2398295\") " pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.890519 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4sp9\" (UniqueName: \"kubernetes.io/projected/b2944a80-992b-4799-a461-82f7c2398295-kube-api-access-z4sp9\") pod \"metallb-operator-webhook-server-6cbcf8c94c-hkxm8\" (UID: \"b2944a80-992b-4799-a461-82f7c2398295\") " pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8"
Oct 13 13:18:18 crc kubenswrapper[4684]: I1013 13:18:18.970346 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8"
Oct 13 13:18:19 crc kubenswrapper[4684]: I1013 13:18:19.114324 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2"]
Oct 13 13:18:19 crc kubenswrapper[4684]: I1013 13:18:19.418349 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8"]
Oct 13 13:18:19 crc kubenswrapper[4684]: W1013 13:18:19.422133 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb2944a80_992b_4799_a461_82f7c2398295.slice/crio-096d95908e9dc353701d390e7e7c6728b4f31b934067abfb7a4cbf7ce756db29 WatchSource:0}: Error finding container 096d95908e9dc353701d390e7e7c6728b4f31b934067abfb7a4cbf7ce756db29: Status 404 returned error can't find the container with id 096d95908e9dc353701d390e7e7c6728b4f31b934067abfb7a4cbf7ce756db29
Oct 13 13:18:19 crc kubenswrapper[4684]: I1013 13:18:19.535692 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8" event={"ID":"b2944a80-992b-4799-a461-82f7c2398295","Type":"ContainerStarted","Data":"096d95908e9dc353701d390e7e7c6728b4f31b934067abfb7a4cbf7ce756db29"}
Oct 13 13:18:19 crc kubenswrapper[4684]: I1013 13:18:19.536883 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2" event={"ID":"c231fd74-600a-46ff-ba30-605a9445b002","Type":"ContainerStarted","Data":"7dafa8b06875c466479041c57e8c072ebdd4c0f50ba32546ed67af34a954b5bd"}
Oct 13 13:18:25 crc kubenswrapper[4684]: I1013 13:18:25.577222 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8" event={"ID":"b2944a80-992b-4799-a461-82f7c2398295","Type":"ContainerStarted","Data":"7186cfc4d9e754a20abaa0366b58bfe9b12db38ac63975b816553796abdee82c"}
Oct 13 13:18:25 crc kubenswrapper[4684]: I1013 13:18:25.577764 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8"
Oct 13 13:18:25 crc kubenswrapper[4684]: I1013 13:18:25.579523 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2" event={"ID":"c231fd74-600a-46ff-ba30-605a9445b002","Type":"ContainerStarted","Data":"aced4736d7c6e6c6b054a8f941863ce7a2388cad8356fea0f76fc8d59c09dcf0"}
Oct 13 13:18:25 crc kubenswrapper[4684]: I1013 13:18:25.579785 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2"
Oct 13 13:18:25 crc kubenswrapper[4684]: I1013 13:18:25.599130 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8" podStartSLOduration=2.4366892350000002 podStartE2EDuration="7.599105121s" podCreationTimestamp="2025-10-13 13:18:18 +0000 UTC" firstStartedPulling="2025-10-13 13:18:19.424827441 +0000 UTC m=+653.992211511" lastFinishedPulling="2025-10-13 13:18:24.587243317 +0000 UTC m=+659.154627397" observedRunningTime="2025-10-13 13:18:25.597949925 +0000 UTC m=+660.165334025" watchObservedRunningTime="2025-10-13 13:18:25.599105121 +0000 UTC m=+660.166489191"
pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8" podStartSLOduration=2.4366892350000002 podStartE2EDuration="7.599105121s" podCreationTimestamp="2025-10-13 13:18:18 +0000 UTC" firstStartedPulling="2025-10-13 13:18:19.424827441 +0000 UTC m=+653.992211511" lastFinishedPulling="2025-10-13 13:18:24.587243317 +0000 UTC m=+659.154627397" observedRunningTime="2025-10-13 13:18:25.597949925 +0000 UTC m=+660.165334025" watchObservedRunningTime="2025-10-13 13:18:25.599105121 +0000 UTC m=+660.166489191" Oct 13 13:18:25 crc kubenswrapper[4684]: I1013 13:18:25.616715 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2" podStartSLOduration=2.189901662 podStartE2EDuration="7.616694045s" podCreationTimestamp="2025-10-13 13:18:18 +0000 UTC" firstStartedPulling="2025-10-13 13:18:19.136502059 +0000 UTC m=+653.703886129" lastFinishedPulling="2025-10-13 13:18:24.563294432 +0000 UTC m=+659.130678512" observedRunningTime="2025-10-13 13:18:25.614467125 +0000 UTC m=+660.181851195" watchObservedRunningTime="2025-10-13 13:18:25.616694045 +0000 UTC m=+660.184078115" Oct 13 13:18:38 crc kubenswrapper[4684]: I1013 13:18:38.974290 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6cbcf8c94c-hkxm8" Oct 13 13:18:58 crc kubenswrapper[4684]: I1013 13:18:58.666451 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-745c4c95f5-j54v2" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.421046 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-nwgv6"] Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.421923 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-nwgv6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.423577 4684 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.423822 4684 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-ctbjd" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.426571 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-zgfqf"] Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.429024 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.441318 4684 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.441366 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.442026 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-nwgv6"] Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.474183 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/2422c566-8db9-4af9-824e-0c9c91ab12b8-metrics\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.474231 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2422c566-8db9-4af9-824e-0c9c91ab12b8-metrics-certs\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.474277 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/2422c566-8db9-4af9-824e-0c9c91ab12b8-reloader\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.474300 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/2422c566-8db9-4af9-824e-0c9c91ab12b8-frr-sockets\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.474320 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/2422c566-8db9-4af9-824e-0c9c91ab12b8-frr-conf\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.474348 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/2422c566-8db9-4af9-824e-0c9c91ab12b8-frr-startup\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.474371 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hp48\" (UniqueName: \"kubernetes.io/projected/2422c566-8db9-4af9-824e-0c9c91ab12b8-kube-api-access-8hp48\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.542093 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-hprww"] Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.542915 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-hprww" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.546549 4684 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-mk2qd" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.546721 4684 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.546812 4684 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.546934 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.568116 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-85wm6"] Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.569031 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-85wm6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.573466 4684 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.575347 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndg2s\" (UniqueName: \"kubernetes.io/projected/642eea78-8230-4b4a-b1f3-1a96d1d8942f-kube-api-access-ndg2s\") pod \"frr-k8s-webhook-server-64bf5d555-nwgv6\" (UID: \"642eea78-8230-4b4a-b1f3-1a96d1d8942f\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-nwgv6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.575388 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/2422c566-8db9-4af9-824e-0c9c91ab12b8-reloader\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.575415 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/2422c566-8db9-4af9-824e-0c9c91ab12b8-frr-sockets\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.575435 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/2422c566-8db9-4af9-824e-0c9c91ab12b8-frr-conf\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.575463 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/2422c566-8db9-4af9-824e-0c9c91ab12b8-frr-startup\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.575487 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hp48\" (UniqueName: \"kubernetes.io/projected/2422c566-8db9-4af9-824e-0c9c91ab12b8-kube-api-access-8hp48\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: 
I1013 13:18:59.575512 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/642eea78-8230-4b4a-b1f3-1a96d1d8942f-cert\") pod \"frr-k8s-webhook-server-64bf5d555-nwgv6\" (UID: \"642eea78-8230-4b4a-b1f3-1a96d1d8942f\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-nwgv6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.575558 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/2422c566-8db9-4af9-824e-0c9c91ab12b8-metrics\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.575578 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2422c566-8db9-4af9-824e-0c9c91ab12b8-metrics-certs\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.577237 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/2422c566-8db9-4af9-824e-0c9c91ab12b8-reloader\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.577473 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/2422c566-8db9-4af9-824e-0c9c91ab12b8-frr-sockets\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.577680 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/2422c566-8db9-4af9-824e-0c9c91ab12b8-frr-conf\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.578516 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/2422c566-8db9-4af9-824e-0c9c91ab12b8-frr-startup\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.579029 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/2422c566-8db9-4af9-824e-0c9c91ab12b8-metrics\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.583538 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-85wm6"] Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.590707 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2422c566-8db9-4af9-824e-0c9c91ab12b8-metrics-certs\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.601566 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hp48\" (UniqueName: 
\"kubernetes.io/projected/2422c566-8db9-4af9-824e-0c9c91ab12b8-kube-api-access-8hp48\") pod \"frr-k8s-zgfqf\" (UID: \"2422c566-8db9-4af9-824e-0c9c91ab12b8\") " pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.677103 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac-metrics-certs\") pod \"controller-68d546b9d8-85wm6\" (UID: \"ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac\") " pod="metallb-system/controller-68d546b9d8-85wm6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.677400 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2ff5f48b-1277-4a47-af89-e71172d731d6-memberlist\") pod \"speaker-hprww\" (UID: \"2ff5f48b-1277-4a47-af89-e71172d731d6\") " pod="metallb-system/speaker-hprww" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.677557 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/642eea78-8230-4b4a-b1f3-1a96d1d8942f-cert\") pod \"frr-k8s-webhook-server-64bf5d555-nwgv6\" (UID: \"642eea78-8230-4b4a-b1f3-1a96d1d8942f\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-nwgv6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.677618 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2ff5f48b-1277-4a47-af89-e71172d731d6-metrics-certs\") pod \"speaker-hprww\" (UID: \"2ff5f48b-1277-4a47-af89-e71172d731d6\") " pod="metallb-system/speaker-hprww" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.677658 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/2ff5f48b-1277-4a47-af89-e71172d731d6-metallb-excludel2\") pod \"speaker-hprww\" (UID: \"2ff5f48b-1277-4a47-af89-e71172d731d6\") " pod="metallb-system/speaker-hprww" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.677956 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4cj8\" (UniqueName: \"kubernetes.io/projected/ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac-kube-api-access-d4cj8\") pod \"controller-68d546b9d8-85wm6\" (UID: \"ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac\") " pod="metallb-system/controller-68d546b9d8-85wm6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.678055 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac-cert\") pod \"controller-68d546b9d8-85wm6\" (UID: \"ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac\") " pod="metallb-system/controller-68d546b9d8-85wm6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.678104 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4cw6\" (UniqueName: \"kubernetes.io/projected/2ff5f48b-1277-4a47-af89-e71172d731d6-kube-api-access-x4cw6\") pod \"speaker-hprww\" (UID: \"2ff5f48b-1277-4a47-af89-e71172d731d6\") " pod="metallb-system/speaker-hprww" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.678169 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndg2s\" (UniqueName: 
\"kubernetes.io/projected/642eea78-8230-4b4a-b1f3-1a96d1d8942f-kube-api-access-ndg2s\") pod \"frr-k8s-webhook-server-64bf5d555-nwgv6\" (UID: \"642eea78-8230-4b4a-b1f3-1a96d1d8942f\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-nwgv6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.680504 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/642eea78-8230-4b4a-b1f3-1a96d1d8942f-cert\") pod \"frr-k8s-webhook-server-64bf5d555-nwgv6\" (UID: \"642eea78-8230-4b4a-b1f3-1a96d1d8942f\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-nwgv6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.695702 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndg2s\" (UniqueName: \"kubernetes.io/projected/642eea78-8230-4b4a-b1f3-1a96d1d8942f-kube-api-access-ndg2s\") pod \"frr-k8s-webhook-server-64bf5d555-nwgv6\" (UID: \"642eea78-8230-4b4a-b1f3-1a96d1d8942f\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-nwgv6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.744978 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-nwgv6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.759408 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-zgfqf" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.778567 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac-metrics-certs\") pod \"controller-68d546b9d8-85wm6\" (UID: \"ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac\") " pod="metallb-system/controller-68d546b9d8-85wm6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.778701 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2ff5f48b-1277-4a47-af89-e71172d731d6-memberlist\") pod \"speaker-hprww\" (UID: \"2ff5f48b-1277-4a47-af89-e71172d731d6\") " pod="metallb-system/speaker-hprww" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.778788 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2ff5f48b-1277-4a47-af89-e71172d731d6-metrics-certs\") pod \"speaker-hprww\" (UID: \"2ff5f48b-1277-4a47-af89-e71172d731d6\") " pod="metallb-system/speaker-hprww" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.778861 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/2ff5f48b-1277-4a47-af89-e71172d731d6-metallb-excludel2\") pod \"speaker-hprww\" (UID: \"2ff5f48b-1277-4a47-af89-e71172d731d6\") " pod="metallb-system/speaker-hprww" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.778974 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4cj8\" (UniqueName: \"kubernetes.io/projected/ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac-kube-api-access-d4cj8\") pod \"controller-68d546b9d8-85wm6\" (UID: \"ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac\") " pod="metallb-system/controller-68d546b9d8-85wm6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.779088 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac-cert\") pod 
\"controller-68d546b9d8-85wm6\" (UID: \"ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac\") " pod="metallb-system/controller-68d546b9d8-85wm6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.779176 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4cw6\" (UniqueName: \"kubernetes.io/projected/2ff5f48b-1277-4a47-af89-e71172d731d6-kube-api-access-x4cw6\") pod \"speaker-hprww\" (UID: \"2ff5f48b-1277-4a47-af89-e71172d731d6\") " pod="metallb-system/speaker-hprww" Oct 13 13:18:59 crc kubenswrapper[4684]: E1013 13:18:59.779018 4684 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 13 13:18:59 crc kubenswrapper[4684]: E1013 13:18:59.779740 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2ff5f48b-1277-4a47-af89-e71172d731d6-memberlist podName:2ff5f48b-1277-4a47-af89-e71172d731d6 nodeName:}" failed. No retries permitted until 2025-10-13 13:19:00.279722519 +0000 UTC m=+694.847106589 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/2ff5f48b-1277-4a47-af89-e71172d731d6-memberlist") pod "speaker-hprww" (UID: "2ff5f48b-1277-4a47-af89-e71172d731d6") : secret "metallb-memberlist" not found Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.779974 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/2ff5f48b-1277-4a47-af89-e71172d731d6-metallb-excludel2\") pod \"speaker-hprww\" (UID: \"2ff5f48b-1277-4a47-af89-e71172d731d6\") " pod="metallb-system/speaker-hprww" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.780960 4684 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.788242 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2ff5f48b-1277-4a47-af89-e71172d731d6-metrics-certs\") pod \"speaker-hprww\" (UID: \"2ff5f48b-1277-4a47-af89-e71172d731d6\") " pod="metallb-system/speaker-hprww" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.795580 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4cw6\" (UniqueName: \"kubernetes.io/projected/2ff5f48b-1277-4a47-af89-e71172d731d6-kube-api-access-x4cw6\") pod \"speaker-hprww\" (UID: \"2ff5f48b-1277-4a47-af89-e71172d731d6\") " pod="metallb-system/speaker-hprww" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.810133 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac-metrics-certs\") pod \"controller-68d546b9d8-85wm6\" (UID: \"ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac\") " pod="metallb-system/controller-68d546b9d8-85wm6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.814301 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac-cert\") pod \"controller-68d546b9d8-85wm6\" (UID: \"ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac\") " pod="metallb-system/controller-68d546b9d8-85wm6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.814724 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4cj8\" (UniqueName: 
\"kubernetes.io/projected/ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac-kube-api-access-d4cj8\") pod \"controller-68d546b9d8-85wm6\" (UID: \"ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac\") " pod="metallb-system/controller-68d546b9d8-85wm6" Oct 13 13:18:59 crc kubenswrapper[4684]: I1013 13:18:59.929478 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-85wm6" Oct 13 13:19:00 crc kubenswrapper[4684]: I1013 13:19:00.108998 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-85wm6"] Oct 13 13:19:00 crc kubenswrapper[4684]: W1013 13:19:00.117488 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce2e4042_84e3_41dd_ac0f_a39a3d4cb7ac.slice/crio-854a11317c2d8fd3e23e3441bb784c8dee23a2b5319b0a6c48426e102f4f6443 WatchSource:0}: Error finding container 854a11317c2d8fd3e23e3441bb784c8dee23a2b5319b0a6c48426e102f4f6443: Status 404 returned error can't find the container with id 854a11317c2d8fd3e23e3441bb784c8dee23a2b5319b0a6c48426e102f4f6443 Oct 13 13:19:00 crc kubenswrapper[4684]: I1013 13:19:00.147227 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-nwgv6"] Oct 13 13:19:00 crc kubenswrapper[4684]: W1013 13:19:00.156207 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod642eea78_8230_4b4a_b1f3_1a96d1d8942f.slice/crio-2636315f43f017cc32131e83e12d72b8747308b075e94c755da686eac8459265 WatchSource:0}: Error finding container 2636315f43f017cc32131e83e12d72b8747308b075e94c755da686eac8459265: Status 404 returned error can't find the container with id 2636315f43f017cc32131e83e12d72b8747308b075e94c755da686eac8459265 Oct 13 13:19:00 crc kubenswrapper[4684]: I1013 13:19:00.287843 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2ff5f48b-1277-4a47-af89-e71172d731d6-memberlist\") pod \"speaker-hprww\" (UID: \"2ff5f48b-1277-4a47-af89-e71172d731d6\") " pod="metallb-system/speaker-hprww" Oct 13 13:19:00 crc kubenswrapper[4684]: E1013 13:19:00.287993 4684 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 13 13:19:00 crc kubenswrapper[4684]: E1013 13:19:00.288049 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2ff5f48b-1277-4a47-af89-e71172d731d6-memberlist podName:2ff5f48b-1277-4a47-af89-e71172d731d6 nodeName:}" failed. No retries permitted until 2025-10-13 13:19:01.288031295 +0000 UTC m=+695.855415365 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/2ff5f48b-1277-4a47-af89-e71172d731d6-memberlist") pod "speaker-hprww" (UID: "2ff5f48b-1277-4a47-af89-e71172d731d6") : secret "metallb-memberlist" not found Oct 13 13:19:00 crc kubenswrapper[4684]: I1013 13:19:00.559551 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:19:00 crc kubenswrapper[4684]: I1013 13:19:00.559884 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:19:00 crc kubenswrapper[4684]: I1013 13:19:00.785127 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-85wm6" event={"ID":"ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac","Type":"ContainerStarted","Data":"adf6c1cdb02ebabb0bd8832c8229a6450d6ca8c03e6a3fbc37e102d113ff79f9"} Oct 13 13:19:00 crc kubenswrapper[4684]: I1013 13:19:00.785172 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-85wm6" event={"ID":"ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac","Type":"ContainerStarted","Data":"e04a56be5b91fd23a31ab589c1c2d4f9f536458e4f6410151148103ca395ce50"} Oct 13 13:19:00 crc kubenswrapper[4684]: I1013 13:19:00.785184 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-85wm6" event={"ID":"ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac","Type":"ContainerStarted","Data":"854a11317c2d8fd3e23e3441bb784c8dee23a2b5319b0a6c48426e102f4f6443"} Oct 13 13:19:00 crc kubenswrapper[4684]: I1013 13:19:00.785253 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-85wm6" Oct 13 13:19:00 crc kubenswrapper[4684]: I1013 13:19:00.786170 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zgfqf" event={"ID":"2422c566-8db9-4af9-824e-0c9c91ab12b8","Type":"ContainerStarted","Data":"b750eacc9e5365f4bb999288cd092b1bcce6bf0f48cd94857eaaa7ef6f00aff7"} Oct 13 13:19:00 crc kubenswrapper[4684]: I1013 13:19:00.787294 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-nwgv6" event={"ID":"642eea78-8230-4b4a-b1f3-1a96d1d8942f","Type":"ContainerStarted","Data":"2636315f43f017cc32131e83e12d72b8747308b075e94c755da686eac8459265"} Oct 13 13:19:00 crc kubenswrapper[4684]: I1013 13:19:00.806383 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-85wm6" podStartSLOduration=1.8063638480000002 podStartE2EDuration="1.806363848s" podCreationTimestamp="2025-10-13 13:18:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:19:00.804046345 +0000 UTC m=+695.371430415" watchObservedRunningTime="2025-10-13 13:19:00.806363848 +0000 UTC m=+695.373747938" Oct 13 13:19:01 crc kubenswrapper[4684]: I1013 13:19:01.299535 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: 
Oct 13 13:19:01 crc kubenswrapper[4684]: I1013 13:19:01.304967 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2ff5f48b-1277-4a47-af89-e71172d731d6-memberlist\") pod \"speaker-hprww\" (UID: \"2ff5f48b-1277-4a47-af89-e71172d731d6\") " pod="metallb-system/speaker-hprww"
Oct 13 13:19:01 crc kubenswrapper[4684]: I1013 13:19:01.358093 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-hprww"
Oct 13 13:19:01 crc kubenswrapper[4684]: I1013 13:19:01.802514 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-hprww" event={"ID":"2ff5f48b-1277-4a47-af89-e71172d731d6","Type":"ContainerStarted","Data":"6bdb3593cb5d9a8e85df4bd2aa2eb6f27bb562f33d8ae29755c59bd8896ea9de"}
Oct 13 13:19:01 crc kubenswrapper[4684]: I1013 13:19:01.802813 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-hprww" event={"ID":"2ff5f48b-1277-4a47-af89-e71172d731d6","Type":"ContainerStarted","Data":"4202c56dd151d4a2db14fb6c67dc32d917876826bac27ff5e1defc641c60d770"}
Oct 13 13:19:02 crc kubenswrapper[4684]: I1013 13:19:02.829038 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-hprww" event={"ID":"2ff5f48b-1277-4a47-af89-e71172d731d6","Type":"ContainerStarted","Data":"f7c74e2ca674cefc83a6f40656d0dbc5ed9d23d85883ad0896146efbb68357c9"}
Oct 13 13:19:02 crc kubenswrapper[4684]: I1013 13:19:02.829286 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-hprww"
Oct 13 13:19:02 crc kubenswrapper[4684]: I1013 13:19:02.845356 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-hprww" podStartSLOduration=3.8453396570000002 podStartE2EDuration="3.845339657s" podCreationTimestamp="2025-10-13 13:18:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:19:02.844274263 +0000 UTC m=+697.411658343" watchObservedRunningTime="2025-10-13 13:19:02.845339657 +0000 UTC m=+697.412723727"
Oct 13 13:19:07 crc kubenswrapper[4684]: I1013 13:19:07.857224 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-nwgv6" event={"ID":"642eea78-8230-4b4a-b1f3-1a96d1d8942f","Type":"ContainerStarted","Data":"1343f67853bdc6f1b5399f339f9ac6e950908ad9a96a3717b40eaba5d68f173f"}
Oct 13 13:19:07 crc kubenswrapper[4684]: I1013 13:19:07.857963 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-nwgv6"
Oct 13 13:19:07 crc kubenswrapper[4684]: I1013 13:19:07.859836 4684 generic.go:334] "Generic (PLEG): container finished" podID="2422c566-8db9-4af9-824e-0c9c91ab12b8" containerID="4ea4e020dbedb0fe774747c92d8d8a8b02511d48fdc5ba26d2f0cd68a44fb221" exitCode=0
Oct 13 13:19:07 crc kubenswrapper[4684]: I1013 13:19:07.859873 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zgfqf" event={"ID":"2422c566-8db9-4af9-824e-0c9c91ab12b8","Type":"ContainerDied","Data":"4ea4e020dbedb0fe774747c92d8d8a8b02511d48fdc5ba26d2f0cd68a44fb221"}
Oct 13 13:19:07 crc kubenswrapper[4684]: I1013 13:19:07.876424 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-nwgv6" podStartSLOduration=1.773121282 podStartE2EDuration="8.876407778s" podCreationTimestamp="2025-10-13 13:18:59 +0000 UTC" firstStartedPulling="2025-10-13 13:19:00.158664572 +0000 UTC m=+694.726048652" lastFinishedPulling="2025-10-13 13:19:07.261951088 +0000 UTC m=+701.829335148" observedRunningTime="2025-10-13 13:19:07.873104453 +0000 UTC m=+702.440488523" watchObservedRunningTime="2025-10-13 13:19:07.876407778 +0000 UTC m=+702.443791848"
Oct 13 13:19:08 crc kubenswrapper[4684]: I1013 13:19:08.868057 4684 generic.go:334] "Generic (PLEG): container finished" podID="2422c566-8db9-4af9-824e-0c9c91ab12b8" containerID="ef0519c6bf5a5dbacc741670997813082772217f6d8d6fb24cf5a49c06d2c692" exitCode=0
Oct 13 13:19:08 crc kubenswrapper[4684]: I1013 13:19:08.868137 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zgfqf" event={"ID":"2422c566-8db9-4af9-824e-0c9c91ab12b8","Type":"ContainerDied","Data":"ef0519c6bf5a5dbacc741670997813082772217f6d8d6fb24cf5a49c06d2c692"}
Oct 13 13:19:09 crc kubenswrapper[4684]: I1013 13:19:09.878062 4684 generic.go:334] "Generic (PLEG): container finished" podID="2422c566-8db9-4af9-824e-0c9c91ab12b8" containerID="ba543244c7ec6ef107cfb570e87dd2b59ea05cfbc407a3c0a20e72e36d31a3cf" exitCode=0
Oct 13 13:19:09 crc kubenswrapper[4684]: I1013 13:19:09.878118 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zgfqf" event={"ID":"2422c566-8db9-4af9-824e-0c9c91ab12b8","Type":"ContainerDied","Data":"ba543244c7ec6ef107cfb570e87dd2b59ea05cfbc407a3c0a20e72e36d31a3cf"}
Oct 13 13:19:10 crc kubenswrapper[4684]: I1013 13:19:10.890806 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zgfqf" event={"ID":"2422c566-8db9-4af9-824e-0c9c91ab12b8","Type":"ContainerStarted","Data":"098b79778a0baed7b3f1202f851160e024cf47f17a0fb8627ac0d660917e0898"}
Oct 13 13:19:10 crc kubenswrapper[4684]: I1013 13:19:10.891225 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zgfqf" event={"ID":"2422c566-8db9-4af9-824e-0c9c91ab12b8","Type":"ContainerStarted","Data":"84ecdd85c522f9c764a78f2cbc8bfceab48505d03683b2db85031625d7d8d136"}
Oct 13 13:19:10 crc kubenswrapper[4684]: I1013 13:19:10.891241 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zgfqf" event={"ID":"2422c566-8db9-4af9-824e-0c9c91ab12b8","Type":"ContainerStarted","Data":"8627fe4911ce6bf83a4667a6f432c453cefe7d09d3d4f5eb8ddbe928d3a80e15"}
Oct 13 13:19:10 crc kubenswrapper[4684]: I1013 13:19:10.891250 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zgfqf" event={"ID":"2422c566-8db9-4af9-824e-0c9c91ab12b8","Type":"ContainerStarted","Data":"a0c67d8b657b679bcab92cc8a2c2d5c4683a3d371a50fbe94823ec73d679f4dd"}
Oct 13 13:19:10 crc kubenswrapper[4684]: I1013 13:19:10.891259 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zgfqf" event={"ID":"2422c566-8db9-4af9-824e-0c9c91ab12b8","Type":"ContainerStarted","Data":"f275be8a85d9954ff6c2bbb4de447b80a6b1a6e75d888e535b55850ef96bd547"}
Oct 13 13:19:11 crc kubenswrapper[4684]: I1013 13:19:11.363867 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-hprww"
Oct 13 13:19:11 crc kubenswrapper[4684]: I1013 13:19:11.905938 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zgfqf" event={"ID":"2422c566-8db9-4af9-824e-0c9c91ab12b8","Type":"ContainerStarted","Data":"271742291e1a0ad0744eb01dffa15da050a75faee5a3de7f2e666f0ea404459f"}
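The pod_startup_latency_tracker lines encode a simple relationship that the frr-k8s-webhook-server entry above makes easy to verify: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration is that E2E figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling). A quick check in Python against the monotonic m=+... offsets from that entry:

    # Offsets (seconds since kubelet start) from the webhook-server entry above.
    first_started_pulling = 694.726048652   # m=+694.726048652
    last_finished_pulling = 701.829335148   # m=+701.829335148

    e2e = 8.876407778                       # podStartE2EDuration from the same entry
    pull = last_finished_pulling - first_started_pulling
    slo = e2e - pull
    print(round(slo, 9))                    # 1.773121282, matching podStartSLOduration

Entries whose pulls never ran (firstStartedPulling="0001-01-01 ...") report SLO and E2E as the same value, as in the controller and speaker entries earlier.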
Oct 13 13:19:11 crc kubenswrapper[4684]: I1013 13:19:11.906978 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-zgfqf"
Oct 13 13:19:11 crc kubenswrapper[4684]: I1013 13:19:11.931057 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-zgfqf" podStartSLOduration=5.638720642 podStartE2EDuration="12.93103814s" podCreationTimestamp="2025-10-13 13:18:59 +0000 UTC" firstStartedPulling="2025-10-13 13:18:59.967172132 +0000 UTC m=+694.534556212" lastFinishedPulling="2025-10-13 13:19:07.25948964 +0000 UTC m=+701.826873710" observedRunningTime="2025-10-13 13:19:11.92597226 +0000 UTC m=+706.493356340" watchObservedRunningTime="2025-10-13 13:19:11.93103814 +0000 UTC m=+706.498422210"
Oct 13 13:19:14 crc kubenswrapper[4684]: I1013 13:19:14.142715 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-f66cs"]
Oct 13 13:19:14 crc kubenswrapper[4684]: I1013 13:19:14.145607 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-f66cs"
Oct 13 13:19:14 crc kubenswrapper[4684]: I1013 13:19:14.148282 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt"
Oct 13 13:19:14 crc kubenswrapper[4684]: I1013 13:19:14.148655 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt"
Oct 13 13:19:14 crc kubenswrapper[4684]: I1013 13:19:14.149003 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-wplsc"
Oct 13 13:19:14 crc kubenswrapper[4684]: I1013 13:19:14.165165 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-f66cs"]
Oct 13 13:19:14 crc kubenswrapper[4684]: I1013 13:19:14.270656 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrzvl\" (UniqueName: \"kubernetes.io/projected/ed95420b-0952-4b64-a82c-6f543d4291fd-kube-api-access-vrzvl\") pod \"openstack-operator-index-f66cs\" (UID: \"ed95420b-0952-4b64-a82c-6f543d4291fd\") " pod="openstack-operators/openstack-operator-index-f66cs"
Oct 13 13:19:14 crc kubenswrapper[4684]: I1013 13:19:14.372204 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrzvl\" (UniqueName: \"kubernetes.io/projected/ed95420b-0952-4b64-a82c-6f543d4291fd-kube-api-access-vrzvl\") pod \"openstack-operator-index-f66cs\" (UID: \"ed95420b-0952-4b64-a82c-6f543d4291fd\") " pod="openstack-operators/openstack-operator-index-f66cs"
Oct 13 13:19:14 crc kubenswrapper[4684]: I1013 13:19:14.392486 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrzvl\" (UniqueName: \"kubernetes.io/projected/ed95420b-0952-4b64-a82c-6f543d4291fd-kube-api-access-vrzvl\") pod \"openstack-operator-index-f66cs\" (UID: \"ed95420b-0952-4b64-a82c-6f543d4291fd\") " pod="openstack-operators/openstack-operator-index-f66cs"
Oct 13 13:19:14 crc kubenswrapper[4684]: I1013 13:19:14.471450 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-f66cs"
Oct 13 13:19:14 crc kubenswrapper[4684]: I1013 13:19:14.668445 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-f66cs"]
Oct 13 13:19:14 crc kubenswrapper[4684]: I1013 13:19:14.760342 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-zgfqf"
Oct 13 13:19:14 crc kubenswrapper[4684]: I1013 13:19:14.819975 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-zgfqf"
Oct 13 13:19:14 crc kubenswrapper[4684]: I1013 13:19:14.929026 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-f66cs" event={"ID":"ed95420b-0952-4b64-a82c-6f543d4291fd","Type":"ContainerStarted","Data":"44d5822d4b677786f0fb1f2b73d7e753944eb510249f9e4e73c2aa25c3f8de7b"}
Oct 13 13:19:16 crc kubenswrapper[4684]: I1013 13:19:16.943765 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-f66cs" event={"ID":"ed95420b-0952-4b64-a82c-6f543d4291fd","Type":"ContainerStarted","Data":"7d6206d2f10cc4c2f7695afebf572208464643f9efaff4e912793f6ec5da2e70"}
Oct 13 13:19:16 crc kubenswrapper[4684]: I1013 13:19:16.970759 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-f66cs" podStartSLOduration=1.758606802 podStartE2EDuration="2.970719242s" podCreationTimestamp="2025-10-13 13:19:14 +0000 UTC" firstStartedPulling="2025-10-13 13:19:14.693379188 +0000 UTC m=+709.260763258" lastFinishedPulling="2025-10-13 13:19:15.905491638 +0000 UTC m=+710.472875698" observedRunningTime="2025-10-13 13:19:16.968602905 +0000 UTC m=+711.535987005" watchObservedRunningTime="2025-10-13 13:19:16.970719242 +0000 UTC m=+711.538103332"
Oct 13 13:19:17 crc kubenswrapper[4684]: I1013 13:19:17.524476 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-f66cs"]
Oct 13 13:19:18 crc kubenswrapper[4684]: I1013 13:19:18.131546 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-6rh8x"]
Oct 13 13:19:18 crc kubenswrapper[4684]: I1013 13:19:18.132797 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-6rh8x"
Oct 13 13:19:18 crc kubenswrapper[4684]: I1013 13:19:18.155124 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-6rh8x"]
Oct 13 13:19:18 crc kubenswrapper[4684]: I1013 13:19:18.232990 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz2sv\" (UniqueName: \"kubernetes.io/projected/f355c9e7-5235-4ab1-891a-006f5c66de34-kube-api-access-nz2sv\") pod \"openstack-operator-index-6rh8x\" (UID: \"f355c9e7-5235-4ab1-891a-006f5c66de34\") " pod="openstack-operators/openstack-operator-index-6rh8x"
Oct 13 13:19:18 crc kubenswrapper[4684]: I1013 13:19:18.333848 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz2sv\" (UniqueName: \"kubernetes.io/projected/f355c9e7-5235-4ab1-891a-006f5c66de34-kube-api-access-nz2sv\") pod \"openstack-operator-index-6rh8x\" (UID: \"f355c9e7-5235-4ab1-891a-006f5c66de34\") " pod="openstack-operators/openstack-operator-index-6rh8x"
Oct 13 13:19:18 crc kubenswrapper[4684]: I1013 13:19:18.367473 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz2sv\" (UniqueName: \"kubernetes.io/projected/f355c9e7-5235-4ab1-891a-006f5c66de34-kube-api-access-nz2sv\") pod \"openstack-operator-index-6rh8x\" (UID: \"f355c9e7-5235-4ab1-891a-006f5c66de34\") " pod="openstack-operators/openstack-operator-index-6rh8x"
Oct 13 13:19:18 crc kubenswrapper[4684]: I1013 13:19:18.480156 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-6rh8x"
Oct 13 13:19:18 crc kubenswrapper[4684]: I1013 13:19:18.910705 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-6rh8x"]
Oct 13 13:19:18 crc kubenswrapper[4684]: I1013 13:19:18.962594 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6rh8x" event={"ID":"f355c9e7-5235-4ab1-891a-006f5c66de34","Type":"ContainerStarted","Data":"9faf78c4b04c8ce7ddb43fe5edf8b4fd7a9750b82236f4ebcf884bacfc3a732d"}
Oct 13 13:19:18 crc kubenswrapper[4684]: I1013 13:19:18.962713 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-f66cs" podUID="ed95420b-0952-4b64-a82c-6f543d4291fd" containerName="registry-server" containerID="cri-o://7d6206d2f10cc4c2f7695afebf572208464643f9efaff4e912793f6ec5da2e70" gracePeriod=2
Oct 13 13:19:19 crc kubenswrapper[4684]: I1013 13:19:19.367648 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-f66cs"
Oct 13 13:19:19 crc kubenswrapper[4684]: I1013 13:19:19.553375 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrzvl\" (UniqueName: \"kubernetes.io/projected/ed95420b-0952-4b64-a82c-6f543d4291fd-kube-api-access-vrzvl\") pod \"ed95420b-0952-4b64-a82c-6f543d4291fd\" (UID: \"ed95420b-0952-4b64-a82c-6f543d4291fd\") "
Oct 13 13:19:19 crc kubenswrapper[4684]: I1013 13:19:19.560279 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed95420b-0952-4b64-a82c-6f543d4291fd-kube-api-access-vrzvl" (OuterVolumeSpecName: "kube-api-access-vrzvl") pod "ed95420b-0952-4b64-a82c-6f543d4291fd" (UID: "ed95420b-0952-4b64-a82c-6f543d4291fd"). InnerVolumeSpecName "kube-api-access-vrzvl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:19:19 crc kubenswrapper[4684]: I1013 13:19:19.654848 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrzvl\" (UniqueName: \"kubernetes.io/projected/ed95420b-0952-4b64-a82c-6f543d4291fd-kube-api-access-vrzvl\") on node \"crc\" DevicePath \"\""
Oct 13 13:19:19 crc kubenswrapper[4684]: I1013 13:19:19.751699 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-nwgv6"
Oct 13 13:19:19 crc kubenswrapper[4684]: I1013 13:19:19.937025 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-85wm6"
Oct 13 13:19:19 crc kubenswrapper[4684]: I1013 13:19:19.975952 4684 generic.go:334] "Generic (PLEG): container finished" podID="ed95420b-0952-4b64-a82c-6f543d4291fd" containerID="7d6206d2f10cc4c2f7695afebf572208464643f9efaff4e912793f6ec5da2e70" exitCode=0
Oct 13 13:19:19 crc kubenswrapper[4684]: I1013 13:19:19.976052 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-f66cs"
Oct 13 13:19:19 crc kubenswrapper[4684]: I1013 13:19:19.976100 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-f66cs" event={"ID":"ed95420b-0952-4b64-a82c-6f543d4291fd","Type":"ContainerDied","Data":"7d6206d2f10cc4c2f7695afebf572208464643f9efaff4e912793f6ec5da2e70"}
Oct 13 13:19:19 crc kubenswrapper[4684]: I1013 13:19:19.976155 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-f66cs" event={"ID":"ed95420b-0952-4b64-a82c-6f543d4291fd","Type":"ContainerDied","Data":"44d5822d4b677786f0fb1f2b73d7e753944eb510249f9e4e73c2aa25c3f8de7b"}
Oct 13 13:19:19 crc kubenswrapper[4684]: I1013 13:19:19.976186 4684 scope.go:117] "RemoveContainer" containerID="7d6206d2f10cc4c2f7695afebf572208464643f9efaff4e912793f6ec5da2e70"
Oct 13 13:19:19 crc kubenswrapper[4684]: I1013 13:19:19.979557 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6rh8x" event={"ID":"f355c9e7-5235-4ab1-891a-006f5c66de34","Type":"ContainerStarted","Data":"747833e79ddbdf5410da44bfd526875496cab8368f4a74bec0e48d7664009b2d"}
Oct 13 13:19:20 crc kubenswrapper[4684]: I1013 13:19:20.001646 4684 scope.go:117] "RemoveContainer" containerID="7d6206d2f10cc4c2f7695afebf572208464643f9efaff4e912793f6ec5da2e70"
Oct 13 13:19:20 crc kubenswrapper[4684]: E1013 13:19:20.002771 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d6206d2f10cc4c2f7695afebf572208464643f9efaff4e912793f6ec5da2e70\": container with ID starting with 7d6206d2f10cc4c2f7695afebf572208464643f9efaff4e912793f6ec5da2e70 not found: ID does not exist" containerID="7d6206d2f10cc4c2f7695afebf572208464643f9efaff4e912793f6ec5da2e70"
Oct 13 13:19:20 crc kubenswrapper[4684]: I1013 13:19:20.003319 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d6206d2f10cc4c2f7695afebf572208464643f9efaff4e912793f6ec5da2e70"} err="failed to get container status \"7d6206d2f10cc4c2f7695afebf572208464643f9efaff4e912793f6ec5da2e70\": rpc error: code = NotFound desc = could not find container \"7d6206d2f10cc4c2f7695afebf572208464643f9efaff4e912793f6ec5da2e70\": container with ID starting with 7d6206d2f10cc4c2f7695afebf572208464643f9efaff4e912793f6ec5da2e70 not found: ID does not exist"
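The RemoveContainer/NotFound pair above is a benign race: the container was already removed by the time the cleanup path queried its status, so the runtime returns NotFound and the deletor merely logs it. Treating NotFound as success is the standard idempotent-delete pattern; a minimal sketch under assumed names (not the kubelet code):

    class NotFound(Exception):
        """Stand-in for a gRPC NotFound status from the container runtime."""

    def remove_container(runtime, container_id):
        # Deleting something that is already gone counts as success:
        # a concurrent cleanup may have removed it first.
        try:
            runtime.remove(container_id)
        except NotFound:
            print(f"container {container_id[:12]} already gone; nothing to do")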
Oct 13 13:19:20 crc kubenswrapper[4684]: I1013 13:19:20.005519 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-6rh8x" podStartSLOduration=1.459806205 podStartE2EDuration="2.005494869s" podCreationTimestamp="2025-10-13 13:19:18 +0000 UTC" firstStartedPulling="2025-10-13 13:19:18.93631539 +0000 UTC m=+713.503699500" lastFinishedPulling="2025-10-13 13:19:19.482004094 +0000 UTC m=+714.049388164" observedRunningTime="2025-10-13 13:19:19.998129007 +0000 UTC m=+714.565513117" watchObservedRunningTime="2025-10-13 13:19:20.005494869 +0000 UTC m=+714.572878949"
Oct 13 13:19:20 crc kubenswrapper[4684]: I1013 13:19:20.022942 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-f66cs"]
Oct 13 13:19:20 crc kubenswrapper[4684]: I1013 13:19:20.026831 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-f66cs"]
Oct 13 13:19:20 crc kubenswrapper[4684]: I1013 13:19:20.364320 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed95420b-0952-4b64-a82c-6f543d4291fd" path="/var/lib/kubelet/pods/ed95420b-0952-4b64-a82c-6f543d4291fd/volumes"
Oct 13 13:19:28 crc kubenswrapper[4684]: I1013 13:19:28.481408 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-6rh8x"
Oct 13 13:19:28 crc kubenswrapper[4684]: I1013 13:19:28.481891 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-6rh8x"
Oct 13 13:19:28 crc kubenswrapper[4684]: I1013 13:19:28.529692 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-6rh8x"
Oct 13 13:19:29 crc kubenswrapper[4684]: I1013 13:19:29.092585 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-6rh8x"
Oct 13 13:19:29 crc kubenswrapper[4684]: I1013 13:19:29.765877 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-zgfqf"
Oct 13 13:19:30 crc kubenswrapper[4684]: I1013 13:19:30.559567 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 13:19:30 crc kubenswrapper[4684]: I1013 13:19:30.559655 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 13:19:30 crc kubenswrapper[4684]: I1013 13:19:30.976087 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp"]
Oct 13 13:19:30 crc kubenswrapper[4684]: E1013 13:19:30.976575 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed95420b-0952-4b64-a82c-6f543d4291fd" containerName="registry-server"
Oct 13 13:19:30 crc kubenswrapper[4684]: I1013 13:19:30.976608 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed95420b-0952-4b64-a82c-6f543d4291fd" containerName="registry-server"
Oct 13 13:19:30 crc kubenswrapper[4684]: I1013 13:19:30.976895 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed95420b-0952-4b64-a82c-6f543d4291fd" containerName="registry-server"
Oct 13 13:19:30 crc kubenswrapper[4684]: I1013 13:19:30.979204 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp"
Oct 13 13:19:30 crc kubenswrapper[4684]: I1013 13:19:30.982600 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-r76rb"
Oct 13 13:19:30 crc kubenswrapper[4684]: I1013 13:19:30.986494 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp"]
Oct 13 13:19:31 crc kubenswrapper[4684]: I1013 13:19:31.136763 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d86efc0c-a787-4c60-9ed0-3ffcde968316-bundle\") pod \"32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp\" (UID: \"d86efc0c-a787-4c60-9ed0-3ffcde968316\") " pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp"
Oct 13 13:19:31 crc kubenswrapper[4684]: I1013 13:19:31.136835 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5lp2\" (UniqueName: \"kubernetes.io/projected/d86efc0c-a787-4c60-9ed0-3ffcde968316-kube-api-access-w5lp2\") pod \"32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp\" (UID: \"d86efc0c-a787-4c60-9ed0-3ffcde968316\") " pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp"
Oct 13 13:19:31 crc kubenswrapper[4684]: I1013 13:19:31.136860 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d86efc0c-a787-4c60-9ed0-3ffcde968316-util\") pod \"32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp\" (UID: \"d86efc0c-a787-4c60-9ed0-3ffcde968316\") " pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp"
Oct 13 13:19:31 crc kubenswrapper[4684]: I1013 13:19:31.238388 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d86efc0c-a787-4c60-9ed0-3ffcde968316-bundle\") pod \"32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp\" (UID: \"d86efc0c-a787-4c60-9ed0-3ffcde968316\") " pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp"
Oct 13 13:19:31 crc kubenswrapper[4684]: I1013 13:19:31.238513 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5lp2\" (UniqueName: \"kubernetes.io/projected/d86efc0c-a787-4c60-9ed0-3ffcde968316-kube-api-access-w5lp2\") pod \"32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp\" (UID: \"d86efc0c-a787-4c60-9ed0-3ffcde968316\") " pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp"
Oct 13 13:19:31 crc kubenswrapper[4684]: I1013 13:19:31.238565 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d86efc0c-a787-4c60-9ed0-3ffcde968316-util\") pod \"32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp\" (UID: \"d86efc0c-a787-4c60-9ed0-3ffcde968316\") " pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp"
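The machine-config-daemon liveness failures above recur on a fixed period (13:19:00, then 13:19:30) because the prober's GET against http://127.0.0.1:8798/health is refused while nothing is listening. The check itself is just a timed HTTP GET that treats a success-range status as healthy; roughly, as an illustration rather than the kubelet prober:

    import urllib.request
    import urllib.error

    def http_liveness(url="http://127.0.0.1:8798/health", timeout=1.0):
        """Return (healthy, detail), mimicking an HTTP liveness check."""
        try:
            with urllib.request.urlopen(url, timeout=timeout) as resp:
                return 200 <= resp.status < 400, f"status {resp.status}"
        except (urllib.error.URLError, OSError) as err:
            # e.g. "dial tcp 127.0.0.1:8798: connect: connection refused"
            return False, str(err)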
Oct 13 13:19:31 crc kubenswrapper[4684]: I1013 13:19:31.239257 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d86efc0c-a787-4c60-9ed0-3ffcde968316-util\") pod \"32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp\" (UID: \"d86efc0c-a787-4c60-9ed0-3ffcde968316\") " pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp"
Oct 13 13:19:31 crc kubenswrapper[4684]: I1013 13:19:31.239400 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d86efc0c-a787-4c60-9ed0-3ffcde968316-bundle\") pod \"32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp\" (UID: \"d86efc0c-a787-4c60-9ed0-3ffcde968316\") " pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp"
Oct 13 13:19:31 crc kubenswrapper[4684]: I1013 13:19:31.259418 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5lp2\" (UniqueName: \"kubernetes.io/projected/d86efc0c-a787-4c60-9ed0-3ffcde968316-kube-api-access-w5lp2\") pod \"32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp\" (UID: \"d86efc0c-a787-4c60-9ed0-3ffcde968316\") " pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp"
Oct 13 13:19:31 crc kubenswrapper[4684]: I1013 13:19:31.300477 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp"
Oct 13 13:19:31 crc kubenswrapper[4684]: I1013 13:19:31.726065 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp"]
Oct 13 13:19:31 crc kubenswrapper[4684]: W1013 13:19:31.741054 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd86efc0c_a787_4c60_9ed0_3ffcde968316.slice/crio-2e3cffd4fe7060466dd700c6629d7335d577bade0a09bfcc3677cbc8e1bd2b16 WatchSource:0}: Error finding container 2e3cffd4fe7060466dd700c6629d7335d577bade0a09bfcc3677cbc8e1bd2b16: Status 404 returned error can't find the container with id 2e3cffd4fe7060466dd700c6629d7335d577bade0a09bfcc3677cbc8e1bd2b16
Oct 13 13:19:32 crc kubenswrapper[4684]: I1013 13:19:32.076210 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp" event={"ID":"d86efc0c-a787-4c60-9ed0-3ffcde968316","Type":"ContainerStarted","Data":"23db22446db677c8602d736d066d532f8e0b278eec7642b2eb51dba6b8b77c5c"}
Oct 13 13:19:32 crc kubenswrapper[4684]: I1013 13:19:32.076261 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp" event={"ID":"d86efc0c-a787-4c60-9ed0-3ffcde968316","Type":"ContainerStarted","Data":"2e3cffd4fe7060466dd700c6629d7335d577bade0a09bfcc3677cbc8e1bd2b16"}
Oct 13 13:19:33 crc kubenswrapper[4684]: I1013 13:19:33.083427 4684 generic.go:334] "Generic (PLEG): container finished" podID="d86efc0c-a787-4c60-9ed0-3ffcde968316" containerID="23db22446db677c8602d736d066d532f8e0b278eec7642b2eb51dba6b8b77c5c" exitCode=0
Oct 13 13:19:33 crc kubenswrapper[4684]: I1013 13:19:33.083477 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp" event={"ID":"d86efc0c-a787-4c60-9ed0-3ffcde968316","Type":"ContainerDied","Data":"23db22446db677c8602d736d066d532f8e0b278eec7642b2eb51dba6b8b77c5c"}
Oct 13 13:19:35 crc kubenswrapper[4684]: I1013 13:19:35.103420 4684 generic.go:334] "Generic (PLEG): container finished" podID="d86efc0c-a787-4c60-9ed0-3ffcde968316" containerID="88ef87eedeccc96291bbdfdfed870e0c9532b35797a6a4692625b7cf00321bff" exitCode=0
Oct 13 13:19:35 crc kubenswrapper[4684]: I1013 13:19:35.103551 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp" event={"ID":"d86efc0c-a787-4c60-9ed0-3ffcde968316","Type":"ContainerDied","Data":"88ef87eedeccc96291bbdfdfed870e0c9532b35797a6a4692625b7cf00321bff"}
Oct 13 13:19:36 crc kubenswrapper[4684]: I1013 13:19:36.114329 4684 generic.go:334] "Generic (PLEG): container finished" podID="d86efc0c-a787-4c60-9ed0-3ffcde968316" containerID="c56c535e3bea8bb573b6643cb7d35dd5ae33d96e94c978ab6a7d6f98a2d2599f" exitCode=0
Oct 13 13:19:36 crc kubenswrapper[4684]: I1013 13:19:36.114420 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp" event={"ID":"d86efc0c-a787-4c60-9ed0-3ffcde968316","Type":"ContainerDied","Data":"c56c535e3bea8bb573b6643cb7d35dd5ae33d96e94c978ab6a7d6f98a2d2599f"}
Oct 13 13:19:37 crc kubenswrapper[4684]: I1013 13:19:37.386315 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp"
Oct 13 13:19:37 crc kubenswrapper[4684]: I1013 13:19:37.428288 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5lp2\" (UniqueName: \"kubernetes.io/projected/d86efc0c-a787-4c60-9ed0-3ffcde968316-kube-api-access-w5lp2\") pod \"d86efc0c-a787-4c60-9ed0-3ffcde968316\" (UID: \"d86efc0c-a787-4c60-9ed0-3ffcde968316\") "
Oct 13 13:19:37 crc kubenswrapper[4684]: I1013 13:19:37.428367 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d86efc0c-a787-4c60-9ed0-3ffcde968316-util\") pod \"d86efc0c-a787-4c60-9ed0-3ffcde968316\" (UID: \"d86efc0c-a787-4c60-9ed0-3ffcde968316\") "
Oct 13 13:19:37 crc kubenswrapper[4684]: I1013 13:19:37.428404 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d86efc0c-a787-4c60-9ed0-3ffcde968316-bundle\") pod \"d86efc0c-a787-4c60-9ed0-3ffcde968316\" (UID: \"d86efc0c-a787-4c60-9ed0-3ffcde968316\") "
Oct 13 13:19:37 crc kubenswrapper[4684]: I1013 13:19:37.429611 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d86efc0c-a787-4c60-9ed0-3ffcde968316-bundle" (OuterVolumeSpecName: "bundle") pod "d86efc0c-a787-4c60-9ed0-3ffcde968316" (UID: "d86efc0c-a787-4c60-9ed0-3ffcde968316"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
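The 32da808...ap49gp pod above is a short-lived bundle-unpack job: its pull, extract, and util containers (the names appear in the RemoveStaleState lines at 13:19:43) run in sequence, each finishing with exitCode=0 before the pod is torn down and its bundle/util empty-dir volumes are unmounted. The shape of that workflow as a generic sketch only, with placeholder commands standing in for the real container images:

    import subprocess

    # Placeholder steps standing in for the job's pull/extract/util containers.
    steps = [
        ("pull", ["true"]),
        ("extract", ["true"]),
        ("util", ["true"]),
    ]

    for name, cmd in steps:
        # Each step must exit 0 before the next starts; a failure aborts the job.
        result = subprocess.run(cmd)
        print(f"container {name!r} finished exitCode={result.returncode}")
        if result.returncode != 0:
            raise SystemExit(f"step {name!r} failed")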
Oct 13 13:19:37 crc kubenswrapper[4684]: I1013 13:19:37.437450 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d86efc0c-a787-4c60-9ed0-3ffcde968316-kube-api-access-w5lp2" (OuterVolumeSpecName: "kube-api-access-w5lp2") pod "d86efc0c-a787-4c60-9ed0-3ffcde968316" (UID: "d86efc0c-a787-4c60-9ed0-3ffcde968316"). InnerVolumeSpecName "kube-api-access-w5lp2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:19:37 crc kubenswrapper[4684]: I1013 13:19:37.439101 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d86efc0c-a787-4c60-9ed0-3ffcde968316-util" (OuterVolumeSpecName: "util") pod "d86efc0c-a787-4c60-9ed0-3ffcde968316" (UID: "d86efc0c-a787-4c60-9ed0-3ffcde968316"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:19:37 crc kubenswrapper[4684]: I1013 13:19:37.529630 4684 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d86efc0c-a787-4c60-9ed0-3ffcde968316-util\") on node \"crc\" DevicePath \"\""
Oct 13 13:19:37 crc kubenswrapper[4684]: I1013 13:19:37.529665 4684 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d86efc0c-a787-4c60-9ed0-3ffcde968316-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 13:19:37 crc kubenswrapper[4684]: I1013 13:19:37.529674 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5lp2\" (UniqueName: \"kubernetes.io/projected/d86efc0c-a787-4c60-9ed0-3ffcde968316-kube-api-access-w5lp2\") on node \"crc\" DevicePath \"\""
Oct 13 13:19:38 crc kubenswrapper[4684]: I1013 13:19:38.130087 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp" event={"ID":"d86efc0c-a787-4c60-9ed0-3ffcde968316","Type":"ContainerDied","Data":"2e3cffd4fe7060466dd700c6629d7335d577bade0a09bfcc3677cbc8e1bd2b16"}
Oct 13 13:19:38 crc kubenswrapper[4684]: I1013 13:19:38.130146 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e3cffd4fe7060466dd700c6629d7335d577bade0a09bfcc3677cbc8e1bd2b16"
Oct 13 13:19:38 crc kubenswrapper[4684]: I1013 13:19:38.130181 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp"
Oct 13 13:19:43 crc kubenswrapper[4684]: I1013 13:19:43.503014 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-64895cd698-7gflz"]
Oct 13 13:19:43 crc kubenswrapper[4684]: E1013 13:19:43.503600 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d86efc0c-a787-4c60-9ed0-3ffcde968316" containerName="extract"
Oct 13 13:19:43 crc kubenswrapper[4684]: I1013 13:19:43.503616 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="d86efc0c-a787-4c60-9ed0-3ffcde968316" containerName="extract"
Oct 13 13:19:43 crc kubenswrapper[4684]: E1013 13:19:43.503632 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d86efc0c-a787-4c60-9ed0-3ffcde968316" containerName="util"
Oct 13 13:19:43 crc kubenswrapper[4684]: I1013 13:19:43.503637 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="d86efc0c-a787-4c60-9ed0-3ffcde968316" containerName="util"
Oct 13 13:19:43 crc kubenswrapper[4684]: E1013 13:19:43.503648 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d86efc0c-a787-4c60-9ed0-3ffcde968316" containerName="pull"
Oct 13 13:19:43 crc kubenswrapper[4684]: I1013 13:19:43.503655 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="d86efc0c-a787-4c60-9ed0-3ffcde968316" containerName="pull"
Oct 13 13:19:43 crc kubenswrapper[4684]: I1013 13:19:43.503751 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="d86efc0c-a787-4c60-9ed0-3ffcde968316" containerName="extract"
Oct 13 13:19:43 crc kubenswrapper[4684]: I1013 13:19:43.504325 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-64895cd698-7gflz"
Oct 13 13:19:43 crc kubenswrapper[4684]: I1013 13:19:43.505637 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-5b4b4"
Oct 13 13:19:43 crc kubenswrapper[4684]: I1013 13:19:43.541226 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-64895cd698-7gflz"]
Oct 13 13:19:43 crc kubenswrapper[4684]: I1013 13:19:43.605298 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcd7r\" (UniqueName: \"kubernetes.io/projected/a5d7b84e-4a82-4671-b69f-ec15f4446875-kube-api-access-wcd7r\") pod \"openstack-operator-controller-operator-64895cd698-7gflz\" (UID: \"a5d7b84e-4a82-4671-b69f-ec15f4446875\") " pod="openstack-operators/openstack-operator-controller-operator-64895cd698-7gflz"
Oct 13 13:19:43 crc kubenswrapper[4684]: I1013 13:19:43.706360 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcd7r\" (UniqueName: \"kubernetes.io/projected/a5d7b84e-4a82-4671-b69f-ec15f4446875-kube-api-access-wcd7r\") pod \"openstack-operator-controller-operator-64895cd698-7gflz\" (UID: \"a5d7b84e-4a82-4671-b69f-ec15f4446875\") " pod="openstack-operators/openstack-operator-controller-operator-64895cd698-7gflz"
Oct 13 13:19:43 crc kubenswrapper[4684]: I1013 13:19:43.728014 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcd7r\" (UniqueName: \"kubernetes.io/projected/a5d7b84e-4a82-4671-b69f-ec15f4446875-kube-api-access-wcd7r\") pod \"openstack-operator-controller-operator-64895cd698-7gflz\" (UID: \"a5d7b84e-4a82-4671-b69f-ec15f4446875\") " pod="openstack-operators/openstack-operator-controller-operator-64895cd698-7gflz"
Oct 13 13:19:43 crc kubenswrapper[4684]: I1013 13:19:43.837800 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-64895cd698-7gflz"
Oct 13 13:19:44 crc kubenswrapper[4684]: I1013 13:19:44.281100 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-64895cd698-7gflz"]
Oct 13 13:19:45 crc kubenswrapper[4684]: I1013 13:19:45.177588 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-64895cd698-7gflz" event={"ID":"a5d7b84e-4a82-4671-b69f-ec15f4446875","Type":"ContainerStarted","Data":"d34d8f4f56bcce5f9d322effcf9889cb9d651011655e4e5da58b99d2dc027840"}
Oct 13 13:19:49 crc kubenswrapper[4684]: I1013 13:19:49.203288 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-64895cd698-7gflz" event={"ID":"a5d7b84e-4a82-4671-b69f-ec15f4446875","Type":"ContainerStarted","Data":"f3e0a2b51ca20fac360ed59fdf0d372102e0aa7b64b3fa1ddde9657db81bba97"}
Oct 13 13:19:51 crc kubenswrapper[4684]: I1013 13:19:51.221407 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-64895cd698-7gflz" event={"ID":"a5d7b84e-4a82-4671-b69f-ec15f4446875","Type":"ContainerStarted","Data":"fe3b0a48fc85bf7d2aa0a159f70096f75fcf8e200ee2e500675c65d9481b316c"}
Oct 13 13:19:51 crc kubenswrapper[4684]: I1013 13:19:51.221745 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-64895cd698-7gflz"
Oct 13 13:19:51 crc kubenswrapper[4684]: I1013 13:19:51.253305 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-64895cd698-7gflz" podStartSLOduration=1.537900203 podStartE2EDuration="8.253284335s" podCreationTimestamp="2025-10-13 13:19:43 +0000 UTC" firstStartedPulling="2025-10-13 13:19:44.29135814 +0000 UTC m=+738.858742210" lastFinishedPulling="2025-10-13 13:19:51.006742272 +0000 UTC m=+745.574126342" observedRunningTime="2025-10-13 13:19:51.248570367 +0000 UTC m=+745.815954447" watchObservedRunningTime="2025-10-13 13:19:51.253284335 +0000 UTC m=+745.820668405"
Oct 13 13:19:53 crc kubenswrapper[4684]: I1013 13:19:53.840829 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-64895cd698-7gflz"
Oct 13 13:19:55 crc kubenswrapper[4684]: I1013 13:19:55.724748 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2ws7t"]
Oct 13 13:19:55 crc kubenswrapper[4684]: I1013 13:19:55.725097 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" podUID="08819dbc-2a7c-4fe9-9084-eb6ce24c2857" containerName="controller-manager" containerID="cri-o://31ae535f1faf9d7215ab35027ab168bd5cb1163110968193e0701b02b8506a5a" gracePeriod=30
Oct 13 13:19:55 crc kubenswrapper[4684]: I1013 13:19:55.751994 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6"]
Oct 13 13:19:55 crc kubenswrapper[4684]: I1013 13:19:55.752422 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" podUID="aa90c071-3247-46ed-a635-b234d452ae89" containerName="route-controller-manager" containerID="cri-o://33d8b5f54fc4eac6e9ca0e958a2dbb1c8b5fd8e6176a9d5747f9f61196053475" gracePeriod=30
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.206751 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t"
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.211826 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6"
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.260807 4684 generic.go:334] "Generic (PLEG): container finished" podID="08819dbc-2a7c-4fe9-9084-eb6ce24c2857" containerID="31ae535f1faf9d7215ab35027ab168bd5cb1163110968193e0701b02b8506a5a" exitCode=0
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.260867 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" event={"ID":"08819dbc-2a7c-4fe9-9084-eb6ce24c2857","Type":"ContainerDied","Data":"31ae535f1faf9d7215ab35027ab168bd5cb1163110968193e0701b02b8506a5a"}
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.260893 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t" event={"ID":"08819dbc-2a7c-4fe9-9084-eb6ce24c2857","Type":"ContainerDied","Data":"4947120cd015d14b2a52c601492ed2b11ec2c0fa03e70bfe83fc8f96292aa586"}
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.260923 4684 scope.go:117] "RemoveContainer" containerID="31ae535f1faf9d7215ab35027ab168bd5cb1163110968193e0701b02b8506a5a"
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.261046 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2ws7t"
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.263354 4684 generic.go:334] "Generic (PLEG): container finished" podID="aa90c071-3247-46ed-a635-b234d452ae89" containerID="33d8b5f54fc4eac6e9ca0e958a2dbb1c8b5fd8e6176a9d5747f9f61196053475" exitCode=0
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.263379 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" event={"ID":"aa90c071-3247-46ed-a635-b234d452ae89","Type":"ContainerDied","Data":"33d8b5f54fc4eac6e9ca0e958a2dbb1c8b5fd8e6176a9d5747f9f61196053475"}
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.263400 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6" event={"ID":"aa90c071-3247-46ed-a635-b234d452ae89","Type":"ContainerDied","Data":"9e698b308dde4faf1f7e312625eb2fa324a460c564e01a94e839da52e4309201"}
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.263413 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6"
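Both deletions above go through "Killing container with a grace period" with gracePeriod=30, and both containers exit cleanly (exitCode=0) well inside the window. The contract is: signal politely, wait up to the grace period, then force-kill. A simplified process-level sketch of that contract (not the CRI implementation):

    import os
    import signal
    import time

    def kill_with_grace(pid, grace_period=30.0, poll=0.1):
        """SIGTERM a process, then SIGKILL it if it outlives the grace period."""
        os.kill(pid, signal.SIGTERM)
        deadline = time.monotonic() + grace_period
        while time.monotonic() < deadline:
            try:
                os.kill(pid, 0)          # probe: raises if the process is gone
            except ProcessLookupError:
                return "terminated within grace period"
            time.sleep(poll)
        os.kill(pid, signal.SIGKILL)     # grace exhausted: force kill
        return "force-killed"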
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.281485 4684 scope.go:117] "RemoveContainer" containerID="31ae535f1faf9d7215ab35027ab168bd5cb1163110968193e0701b02b8506a5a"
Oct 13 13:19:56 crc kubenswrapper[4684]: E1013 13:19:56.281989 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31ae535f1faf9d7215ab35027ab168bd5cb1163110968193e0701b02b8506a5a\": container with ID starting with 31ae535f1faf9d7215ab35027ab168bd5cb1163110968193e0701b02b8506a5a not found: ID does not exist" containerID="31ae535f1faf9d7215ab35027ab168bd5cb1163110968193e0701b02b8506a5a"
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.282042 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31ae535f1faf9d7215ab35027ab168bd5cb1163110968193e0701b02b8506a5a"} err="failed to get container status \"31ae535f1faf9d7215ab35027ab168bd5cb1163110968193e0701b02b8506a5a\": rpc error: code = NotFound desc = could not find container \"31ae535f1faf9d7215ab35027ab168bd5cb1163110968193e0701b02b8506a5a\": container with ID starting with 31ae535f1faf9d7215ab35027ab168bd5cb1163110968193e0701b02b8506a5a not found: ID does not exist"
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.282068 4684 scope.go:117] "RemoveContainer" containerID="33d8b5f54fc4eac6e9ca0e958a2dbb1c8b5fd8e6176a9d5747f9f61196053475"
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.304600 4684 scope.go:117] "RemoveContainer" containerID="33d8b5f54fc4eac6e9ca0e958a2dbb1c8b5fd8e6176a9d5747f9f61196053475"
Oct 13 13:19:56 crc kubenswrapper[4684]: E1013 13:19:56.306630 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33d8b5f54fc4eac6e9ca0e958a2dbb1c8b5fd8e6176a9d5747f9f61196053475\": container with ID starting with 33d8b5f54fc4eac6e9ca0e958a2dbb1c8b5fd8e6176a9d5747f9f61196053475 not found: ID does not exist" containerID="33d8b5f54fc4eac6e9ca0e958a2dbb1c8b5fd8e6176a9d5747f9f61196053475"
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.306686 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33d8b5f54fc4eac6e9ca0e958a2dbb1c8b5fd8e6176a9d5747f9f61196053475"} err="failed to get container status \"33d8b5f54fc4eac6e9ca0e958a2dbb1c8b5fd8e6176a9d5747f9f61196053475\": rpc error: code = NotFound desc = could not find container \"33d8b5f54fc4eac6e9ca0e958a2dbb1c8b5fd8e6176a9d5747f9f61196053475\": container with ID starting with 33d8b5f54fc4eac6e9ca0e958a2dbb1c8b5fd8e6176a9d5747f9f61196053475 not found: ID does not exist"
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.386253 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-proxy-ca-bundles\") pod \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") "
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.386364 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aa90c071-3247-46ed-a635-b234d452ae89-client-ca\") pod \"aa90c071-3247-46ed-a635-b234d452ae89\" (UID: \"aa90c071-3247-46ed-a635-b234d452ae89\") "
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.386448 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-config\") pod \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") "
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.386530 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-serving-cert\") pod \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") "
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.386584 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pg2kw\" (UniqueName: \"kubernetes.io/projected/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-kube-api-access-pg2kw\") pod \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") "
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.386620 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-client-ca\") pod \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\" (UID: \"08819dbc-2a7c-4fe9-9084-eb6ce24c2857\") "
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.386662 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa90c071-3247-46ed-a635-b234d452ae89-serving-cert\") pod \"aa90c071-3247-46ed-a635-b234d452ae89\" (UID: \"aa90c071-3247-46ed-a635-b234d452ae89\") "
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.386694 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4lct\" (UniqueName: \"kubernetes.io/projected/aa90c071-3247-46ed-a635-b234d452ae89-kube-api-access-b4lct\") pod \"aa90c071-3247-46ed-a635-b234d452ae89\" (UID: \"aa90c071-3247-46ed-a635-b234d452ae89\") "
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.386744 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa90c071-3247-46ed-a635-b234d452ae89-config\") pod \"aa90c071-3247-46ed-a635-b234d452ae89\" (UID: \"aa90c071-3247-46ed-a635-b234d452ae89\") "
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.391137 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-client-ca" (OuterVolumeSpecName: "client-ca") pod "08819dbc-2a7c-4fe9-9084-eb6ce24c2857" (UID: "08819dbc-2a7c-4fe9-9084-eb6ce24c2857"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.391223 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-config" (OuterVolumeSpecName: "config") pod "08819dbc-2a7c-4fe9-9084-eb6ce24c2857" (UID: "08819dbc-2a7c-4fe9-9084-eb6ce24c2857"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.391262 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa90c071-3247-46ed-a635-b234d452ae89-client-ca" (OuterVolumeSpecName: "client-ca") pod "aa90c071-3247-46ed-a635-b234d452ae89" (UID: "aa90c071-3247-46ed-a635-b234d452ae89"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.391655 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "08819dbc-2a7c-4fe9-9084-eb6ce24c2857" (UID: "08819dbc-2a7c-4fe9-9084-eb6ce24c2857"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.396688 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-kube-api-access-pg2kw" (OuterVolumeSpecName: "kube-api-access-pg2kw") pod "08819dbc-2a7c-4fe9-9084-eb6ce24c2857" (UID: "08819dbc-2a7c-4fe9-9084-eb6ce24c2857"). InnerVolumeSpecName "kube-api-access-pg2kw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.396709 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa90c071-3247-46ed-a635-b234d452ae89-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "aa90c071-3247-46ed-a635-b234d452ae89" (UID: "aa90c071-3247-46ed-a635-b234d452ae89"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.396961 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa90c071-3247-46ed-a635-b234d452ae89-config" (OuterVolumeSpecName: "config") pod "aa90c071-3247-46ed-a635-b234d452ae89" (UID: "aa90c071-3247-46ed-a635-b234d452ae89"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.397034 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "08819dbc-2a7c-4fe9-9084-eb6ce24c2857" (UID: "08819dbc-2a7c-4fe9-9084-eb6ce24c2857"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.397246 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa90c071-3247-46ed-a635-b234d452ae89-kube-api-access-b4lct" (OuterVolumeSpecName: "kube-api-access-b4lct") pod "aa90c071-3247-46ed-a635-b234d452ae89" (UID: "aa90c071-3247-46ed-a635-b234d452ae89"). InnerVolumeSpecName "kube-api-access-b4lct". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.488125 4684 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.488161 4684 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aa90c071-3247-46ed-a635-b234d452ae89-client-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.488170 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.488179 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.488187 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pg2kw\" (UniqueName: \"kubernetes.io/projected/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-kube-api-access-pg2kw\") on node \"crc\" DevicePath \"\"" Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.488197 4684 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/08819dbc-2a7c-4fe9-9084-eb6ce24c2857-client-ca\") on node \"crc\" DevicePath \"\"" Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.488205 4684 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa90c071-3247-46ed-a635-b234d452ae89-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.488212 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4lct\" (UniqueName: \"kubernetes.io/projected/aa90c071-3247-46ed-a635-b234d452ae89-kube-api-access-b4lct\") on node \"crc\" DevicePath \"\"" Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.488220 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa90c071-3247-46ed-a635-b234d452ae89-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.604673 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2ws7t"] Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.612717 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2ws7t"] Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.622243 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6"] Oct 13 13:19:56 crc kubenswrapper[4684]: I1013 13:19:56.625439 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-c8ww6"] Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.500935 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-69747b8d57-sm6gn"] Oct 13 13:19:57 crc kubenswrapper[4684]: E1013 13:19:57.501220 4684 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="08819dbc-2a7c-4fe9-9084-eb6ce24c2857" containerName="controller-manager" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.501237 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="08819dbc-2a7c-4fe9-9084-eb6ce24c2857" containerName="controller-manager" Oct 13 13:19:57 crc kubenswrapper[4684]: E1013 13:19:57.501254 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa90c071-3247-46ed-a635-b234d452ae89" containerName="route-controller-manager" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.501262 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa90c071-3247-46ed-a635-b234d452ae89" containerName="route-controller-manager" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.501421 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa90c071-3247-46ed-a635-b234d452ae89" containerName="route-controller-manager" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.501438 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="08819dbc-2a7c-4fe9-9084-eb6ce24c2857" containerName="controller-manager" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.501825 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.507247 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.508268 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7"] Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.508761 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.509182 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.509208 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.509242 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.509255 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.509260 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.511831 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.512042 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.515587 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.515940 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.516002 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.516174 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.519258 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-69747b8d57-sm6gn"] Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.520821 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.524409 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7"] Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.601059 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6abba0b0-0a7b-4499-b252-0c906e14721d-client-ca\") pod \"controller-manager-69747b8d57-sm6gn\" (UID: \"6abba0b0-0a7b-4499-b252-0c906e14721d\") " pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.601127 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aac232ef-e594-4f92-a91b-4f759d3eb53e-config\") pod \"route-controller-manager-6f55d6f799-2hqh7\" (UID: \"aac232ef-e594-4f92-a91b-4f759d3eb53e\") " pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.601168 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6abba0b0-0a7b-4499-b252-0c906e14721d-proxy-ca-bundles\") pod 
\"controller-manager-69747b8d57-sm6gn\" (UID: \"6abba0b0-0a7b-4499-b252-0c906e14721d\") " pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.601210 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6abba0b0-0a7b-4499-b252-0c906e14721d-serving-cert\") pod \"controller-manager-69747b8d57-sm6gn\" (UID: \"6abba0b0-0a7b-4499-b252-0c906e14721d\") " pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.601242 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aac232ef-e594-4f92-a91b-4f759d3eb53e-serving-cert\") pod \"route-controller-manager-6f55d6f799-2hqh7\" (UID: \"aac232ef-e594-4f92-a91b-4f759d3eb53e\") " pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.601287 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tr6wl\" (UniqueName: \"kubernetes.io/projected/aac232ef-e594-4f92-a91b-4f759d3eb53e-kube-api-access-tr6wl\") pod \"route-controller-manager-6f55d6f799-2hqh7\" (UID: \"aac232ef-e594-4f92-a91b-4f759d3eb53e\") " pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.601326 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6abba0b0-0a7b-4499-b252-0c906e14721d-config\") pod \"controller-manager-69747b8d57-sm6gn\" (UID: \"6abba0b0-0a7b-4499-b252-0c906e14721d\") " pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.601370 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dh4lt\" (UniqueName: \"kubernetes.io/projected/6abba0b0-0a7b-4499-b252-0c906e14721d-kube-api-access-dh4lt\") pod \"controller-manager-69747b8d57-sm6gn\" (UID: \"6abba0b0-0a7b-4499-b252-0c906e14721d\") " pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.601509 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aac232ef-e594-4f92-a91b-4f759d3eb53e-client-ca\") pod \"route-controller-manager-6f55d6f799-2hqh7\" (UID: \"aac232ef-e594-4f92-a91b-4f759d3eb53e\") " pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.703307 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6abba0b0-0a7b-4499-b252-0c906e14721d-config\") pod \"controller-manager-69747b8d57-sm6gn\" (UID: \"6abba0b0-0a7b-4499-b252-0c906e14721d\") " pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.703615 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dh4lt\" (UniqueName: \"kubernetes.io/projected/6abba0b0-0a7b-4499-b252-0c906e14721d-kube-api-access-dh4lt\") pod 
\"controller-manager-69747b8d57-sm6gn\" (UID: \"6abba0b0-0a7b-4499-b252-0c906e14721d\") " pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.703773 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aac232ef-e594-4f92-a91b-4f759d3eb53e-client-ca\") pod \"route-controller-manager-6f55d6f799-2hqh7\" (UID: \"aac232ef-e594-4f92-a91b-4f759d3eb53e\") " pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.703883 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6abba0b0-0a7b-4499-b252-0c906e14721d-client-ca\") pod \"controller-manager-69747b8d57-sm6gn\" (UID: \"6abba0b0-0a7b-4499-b252-0c906e14721d\") " pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.703976 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aac232ef-e594-4f92-a91b-4f759d3eb53e-config\") pod \"route-controller-manager-6f55d6f799-2hqh7\" (UID: \"aac232ef-e594-4f92-a91b-4f759d3eb53e\") " pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.704047 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6abba0b0-0a7b-4499-b252-0c906e14721d-proxy-ca-bundles\") pod \"controller-manager-69747b8d57-sm6gn\" (UID: \"6abba0b0-0a7b-4499-b252-0c906e14721d\") " pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.704123 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6abba0b0-0a7b-4499-b252-0c906e14721d-serving-cert\") pod \"controller-manager-69747b8d57-sm6gn\" (UID: \"6abba0b0-0a7b-4499-b252-0c906e14721d\") " pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.704188 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aac232ef-e594-4f92-a91b-4f759d3eb53e-serving-cert\") pod \"route-controller-manager-6f55d6f799-2hqh7\" (UID: \"aac232ef-e594-4f92-a91b-4f759d3eb53e\") " pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.704263 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tr6wl\" (UniqueName: \"kubernetes.io/projected/aac232ef-e594-4f92-a91b-4f759d3eb53e-kube-api-access-tr6wl\") pod \"route-controller-manager-6f55d6f799-2hqh7\" (UID: \"aac232ef-e594-4f92-a91b-4f759d3eb53e\") " pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.704671 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aac232ef-e594-4f92-a91b-4f759d3eb53e-client-ca\") pod \"route-controller-manager-6f55d6f799-2hqh7\" (UID: \"aac232ef-e594-4f92-a91b-4f759d3eb53e\") " 
pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.704830 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6abba0b0-0a7b-4499-b252-0c906e14721d-client-ca\") pod \"controller-manager-69747b8d57-sm6gn\" (UID: \"6abba0b0-0a7b-4499-b252-0c906e14721d\") " pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.705060 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6abba0b0-0a7b-4499-b252-0c906e14721d-config\") pod \"controller-manager-69747b8d57-sm6gn\" (UID: \"6abba0b0-0a7b-4499-b252-0c906e14721d\") " pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.705671 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aac232ef-e594-4f92-a91b-4f759d3eb53e-config\") pod \"route-controller-manager-6f55d6f799-2hqh7\" (UID: \"aac232ef-e594-4f92-a91b-4f759d3eb53e\") " pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.705804 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6abba0b0-0a7b-4499-b252-0c906e14721d-proxy-ca-bundles\") pod \"controller-manager-69747b8d57-sm6gn\" (UID: \"6abba0b0-0a7b-4499-b252-0c906e14721d\") " pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.708568 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6abba0b0-0a7b-4499-b252-0c906e14721d-serving-cert\") pod \"controller-manager-69747b8d57-sm6gn\" (UID: \"6abba0b0-0a7b-4499-b252-0c906e14721d\") " pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.709965 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aac232ef-e594-4f92-a91b-4f759d3eb53e-serving-cert\") pod \"route-controller-manager-6f55d6f799-2hqh7\" (UID: \"aac232ef-e594-4f92-a91b-4f759d3eb53e\") " pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.727711 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dh4lt\" (UniqueName: \"kubernetes.io/projected/6abba0b0-0a7b-4499-b252-0c906e14721d-kube-api-access-dh4lt\") pod \"controller-manager-69747b8d57-sm6gn\" (UID: \"6abba0b0-0a7b-4499-b252-0c906e14721d\") " pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.729948 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tr6wl\" (UniqueName: \"kubernetes.io/projected/aac232ef-e594-4f92-a91b-4f759d3eb53e-kube-api-access-tr6wl\") pod \"route-controller-manager-6f55d6f799-2hqh7\" (UID: \"aac232ef-e594-4f92-a91b-4f759d3eb53e\") " pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.834620 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:57 crc kubenswrapper[4684]: I1013 13:19:57.847761 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" Oct 13 13:19:58 crc kubenswrapper[4684]: I1013 13:19:58.219418 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-69747b8d57-sm6gn"] Oct 13 13:19:58 crc kubenswrapper[4684]: I1013 13:19:58.280287 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" event={"ID":"6abba0b0-0a7b-4499-b252-0c906e14721d","Type":"ContainerStarted","Data":"67bb601b1b8359874c262b5c144ca31d4401a8e66dcec0cc75a3fc01cad277cd"} Oct 13 13:19:58 crc kubenswrapper[4684]: I1013 13:19:58.358790 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08819dbc-2a7c-4fe9-9084-eb6ce24c2857" path="/var/lib/kubelet/pods/08819dbc-2a7c-4fe9-9084-eb6ce24c2857/volumes" Oct 13 13:19:58 crc kubenswrapper[4684]: I1013 13:19:58.359671 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa90c071-3247-46ed-a635-b234d452ae89" path="/var/lib/kubelet/pods/aa90c071-3247-46ed-a635-b234d452ae89/volumes" Oct 13 13:19:58 crc kubenswrapper[4684]: I1013 13:19:58.399309 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7"] Oct 13 13:19:58 crc kubenswrapper[4684]: W1013 13:19:58.403265 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaac232ef_e594_4f92_a91b_4f759d3eb53e.slice/crio-9e4f1cd3a754c6eaae5c44acc20d30064d92058058a28203507950382d906673 WatchSource:0}: Error finding container 9e4f1cd3a754c6eaae5c44acc20d30064d92058058a28203507950382d906673: Status 404 returned error can't find the container with id 9e4f1cd3a754c6eaae5c44acc20d30064d92058058a28203507950382d906673 Oct 13 13:19:59 crc kubenswrapper[4684]: I1013 13:19:59.287665 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" event={"ID":"6abba0b0-0a7b-4499-b252-0c906e14721d","Type":"ContainerStarted","Data":"1d10f5acb806fbf595a3769cfe6630181122b4ea8bd916a543eadeef72417b44"} Oct 13 13:19:59 crc kubenswrapper[4684]: I1013 13:19:59.287995 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" Oct 13 13:19:59 crc kubenswrapper[4684]: I1013 13:19:59.290239 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" event={"ID":"aac232ef-e594-4f92-a91b-4f759d3eb53e","Type":"ContainerStarted","Data":"191dcb9bf81881ac7fd6ecd793f26e4a9828298f17f1b643ffaf234d60de62d7"} Oct 13 13:19:59 crc kubenswrapper[4684]: I1013 13:19:59.290307 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" event={"ID":"aac232ef-e594-4f92-a91b-4f759d3eb53e","Type":"ContainerStarted","Data":"9e4f1cd3a754c6eaae5c44acc20d30064d92058058a28203507950382d906673"} Oct 13 13:19:59 crc kubenswrapper[4684]: I1013 13:19:59.290449 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" 
Oct 13 13:19:59 crc kubenswrapper[4684]: I1013 13:19:59.295223 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn"
Oct 13 13:19:59 crc kubenswrapper[4684]: I1013 13:19:59.307231 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7"
Oct 13 13:19:59 crc kubenswrapper[4684]: I1013 13:19:59.308546 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-69747b8d57-sm6gn" podStartSLOduration=4.308530731 podStartE2EDuration="4.308530731s" podCreationTimestamp="2025-10-13 13:19:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:19:59.30721037 +0000 UTC m=+753.874594480" watchObservedRunningTime="2025-10-13 13:19:59.308530731 +0000 UTC m=+753.875914801"
Oct 13 13:19:59 crc kubenswrapper[4684]: I1013 13:19:59.384443 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6f55d6f799-2hqh7" podStartSLOduration=4.384419978 podStartE2EDuration="4.384419978s" podCreationTimestamp="2025-10-13 13:19:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:19:59.333316331 +0000 UTC m=+753.900700411" watchObservedRunningTime="2025-10-13 13:19:59.384419978 +0000 UTC m=+753.951804048"
Oct 13 13:20:00 crc kubenswrapper[4684]: I1013 13:20:00.523729 4684 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 13 13:20:00 crc kubenswrapper[4684]: I1013 13:20:00.560277 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 13:20:00 crc kubenswrapper[4684]: I1013 13:20:00.560335 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 13:20:00 crc kubenswrapper[4684]: I1013 13:20:00.560380 4684 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wns5s"
Oct 13 13:20:00 crc kubenswrapper[4684]: I1013 13:20:00.560911 4684 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5c52fe2f8e685623bf18216a39b095d01b746cd93787ab68efae18b207ec65af"} pod="openshift-machine-config-operator/machine-config-daemon-wns5s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 13 13:20:00 crc kubenswrapper[4684]: I1013 13:20:00.560968 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" containerID="cri-o://5c52fe2f8e685623bf18216a39b095d01b746cd93787ab68efae18b207ec65af" gracePeriod=600
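
The machine-config-daemon liveness failure above is an ordinary HTTP probe hitting "connection refused". A minimal equivalent check in stdlib Go, using the same URL as the log (the one-second timeout is an assumption, not the pod's configured value):

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe performs one HTTP liveness check; any dial error or non-2xx/3xx
// status counts as a failure, just like the kubelet's prober.
func probe(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probe("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Probe failed: probeType=Liveness output:", err)
		return
	}
	fmt.Println("probe succeeded")
}
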
containerID="cri-o://5c52fe2f8e685623bf18216a39b095d01b746cd93787ab68efae18b207ec65af" gracePeriod=600 Oct 13 13:20:01 crc kubenswrapper[4684]: I1013 13:20:01.303803 4684 generic.go:334] "Generic (PLEG): container finished" podID="e54ad64a-6df7-4082-afde-d56463121b3f" containerID="5c52fe2f8e685623bf18216a39b095d01b746cd93787ab68efae18b207ec65af" exitCode=0 Oct 13 13:20:01 crc kubenswrapper[4684]: I1013 13:20:01.303835 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerDied","Data":"5c52fe2f8e685623bf18216a39b095d01b746cd93787ab68efae18b207ec65af"} Oct 13 13:20:01 crc kubenswrapper[4684]: I1013 13:20:01.304210 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"5279c7d29a155e7a0bc08fab5ed9b611b1802504a86b780e2e40dd66f636409f"} Oct 13 13:20:01 crc kubenswrapper[4684]: I1013 13:20:01.304235 4684 scope.go:117] "RemoveContainer" containerID="5d7bac5989bd561f1d776c849f654aeb770962f36d566b0607016c06463a1f0b" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.636184 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-658bdf4b74-6mg9x"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.637838 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-6mg9x" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.640743 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-mq5tg" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.648546 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-658bdf4b74-6mg9x"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.673975 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7b7fb68549-fb6dz"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.675138 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-fb6dz" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.677121 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-llgcp" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.693960 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7b7fb68549-fb6dz"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.699411 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-85d5d9dd78-flhrp"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.700570 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-flhrp" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.703297 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-qlh5b" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.712264 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-84b9b84486-rn562"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.713528 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-rn562" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.719307 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-979b4" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.723583 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84b9b84486-rn562"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.738722 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-858f76bbdd-ggcts"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.740043 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-ggcts" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.746428 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-w44r5" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.747532 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-85d5d9dd78-flhrp"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.759736 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-858f76bbdd-ggcts"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.766798 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7ffbcb7588-k9gct"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.770886 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-k9gct" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.776507 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-24z2z" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.783021 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7ffbcb7588-k9gct"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.789663 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5mkr\" (UniqueName: \"kubernetes.io/projected/b3b72bfd-179d-4a2c-bbcf-eb318658886d-kube-api-access-w5mkr\") pod \"barbican-operator-controller-manager-658bdf4b74-6mg9x\" (UID: \"b3b72bfd-179d-4a2c-bbcf-eb318658886d\") " pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-6mg9x" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.789740 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlgvl\" (UniqueName: \"kubernetes.io/projected/67029ab5-dc79-4300-acc7-2e4ab2115809-kube-api-access-tlgvl\") pod \"designate-operator-controller-manager-85d5d9dd78-flhrp\" (UID: \"67029ab5-dc79-4300-acc7-2e4ab2115809\") " pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-flhrp" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.789812 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ndqk\" (UniqueName: \"kubernetes.io/projected/5db0ac5c-8b11-488c-8be7-14b040ddee3b-kube-api-access-5ndqk\") pod \"cinder-operator-controller-manager-7b7fb68549-fb6dz\" (UID: \"5db0ac5c-8b11-488c-8be7-14b040ddee3b\") " pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-fb6dz" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.802985 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-656bcbd775-cdp57"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.804141 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-cdp57" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.809800 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.810080 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-sk7km" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.810201 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-9c5c78d49-fpcxc"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.811549 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-fpcxc" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.817383 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-th8rl" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.829634 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-656bcbd775-cdp57"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.836449 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-9c5c78d49-fpcxc"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.866989 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-nw27r"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.869356 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-nw27r" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.873644 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-w8bjf" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.876511 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-nw27r"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.893026 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6v4gb\" (UniqueName: \"kubernetes.io/projected/de604670-9b37-401b-a41e-de24f939ddfa-kube-api-access-6v4gb\") pod \"infra-operator-controller-manager-656bcbd775-cdp57\" (UID: \"de604670-9b37-401b-a41e-de24f939ddfa\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-cdp57" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.896534 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ndqk\" (UniqueName: \"kubernetes.io/projected/5db0ac5c-8b11-488c-8be7-14b040ddee3b-kube-api-access-5ndqk\") pod \"cinder-operator-controller-manager-7b7fb68549-fb6dz\" (UID: \"5db0ac5c-8b11-488c-8be7-14b040ddee3b\") " pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-fb6dz" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.896653 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52pcs\" (UniqueName: \"kubernetes.io/projected/be62f268-d474-44c4-847d-3f43f4e3b724-kube-api-access-52pcs\") pod \"glance-operator-controller-manager-84b9b84486-rn562\" (UID: \"be62f268-d474-44c4-847d-3f43f4e3b724\") " pod="openstack-operators/glance-operator-controller-manager-84b9b84486-rn562" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.896695 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbqnl\" (UniqueName: \"kubernetes.io/projected/f5eb77ba-26c3-431f-88aa-43c9ca4e1137-kube-api-access-cbqnl\") pod \"horizon-operator-controller-manager-7ffbcb7588-k9gct\" (UID: \"f5eb77ba-26c3-431f-88aa-43c9ca4e1137\") " pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-k9gct" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.896736 4684 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/de604670-9b37-401b-a41e-de24f939ddfa-cert\") pod \"infra-operator-controller-manager-656bcbd775-cdp57\" (UID: \"de604670-9b37-401b-a41e-de24f939ddfa\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-cdp57" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.896759 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5mkr\" (UniqueName: \"kubernetes.io/projected/b3b72bfd-179d-4a2c-bbcf-eb318658886d-kube-api-access-w5mkr\") pod \"barbican-operator-controller-manager-658bdf4b74-6mg9x\" (UID: \"b3b72bfd-179d-4a2c-bbcf-eb318658886d\") " pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-6mg9x" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.896780 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv7wl\" (UniqueName: \"kubernetes.io/projected/23dafef9-64c0-4b93-b9e6-4c5d00a94a37-kube-api-access-vv7wl\") pod \"heat-operator-controller-manager-858f76bbdd-ggcts\" (UID: \"23dafef9-64c0-4b93-b9e6-4c5d00a94a37\") " pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-ggcts" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.896828 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlgvl\" (UniqueName: \"kubernetes.io/projected/67029ab5-dc79-4300-acc7-2e4ab2115809-kube-api-access-tlgvl\") pod \"designate-operator-controller-manager-85d5d9dd78-flhrp\" (UID: \"67029ab5-dc79-4300-acc7-2e4ab2115809\") " pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-flhrp" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.941196 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-l2r6f"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.942465 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-l2r6f" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.946542 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-tcxg6" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.947038 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ndqk\" (UniqueName: \"kubernetes.io/projected/5db0ac5c-8b11-488c-8be7-14b040ddee3b-kube-api-access-5ndqk\") pod \"cinder-operator-controller-manager-7b7fb68549-fb6dz\" (UID: \"5db0ac5c-8b11-488c-8be7-14b040ddee3b\") " pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-fb6dz" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.947194 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-5f67fbc655-wmg4z"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.948293 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-wmg4z" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.954782 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlgvl\" (UniqueName: \"kubernetes.io/projected/67029ab5-dc79-4300-acc7-2e4ab2115809-kube-api-access-tlgvl\") pod \"designate-operator-controller-manager-85d5d9dd78-flhrp\" (UID: \"67029ab5-dc79-4300-acc7-2e4ab2115809\") " pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-flhrp" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.955111 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-b2bck" Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.965018 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-l2r6f"] Oct 13 13:20:10 crc kubenswrapper[4684]: I1013 13:20:10.966737 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5mkr\" (UniqueName: \"kubernetes.io/projected/b3b72bfd-179d-4a2c-bbcf-eb318658886d-kube-api-access-w5mkr\") pod \"barbican-operator-controller-manager-658bdf4b74-6mg9x\" (UID: \"b3b72bfd-179d-4a2c-bbcf-eb318658886d\") " pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-6mg9x" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:10.995335 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-79d585cb66-cmd4z"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:10.996606 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-cmd4z" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:10.997180 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-fb6dz" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:10.998010 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-t94tb" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:10.999615 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv7wl\" (UniqueName: \"kubernetes.io/projected/23dafef9-64c0-4b93-b9e6-4c5d00a94a37-kube-api-access-vv7wl\") pod \"heat-operator-controller-manager-858f76bbdd-ggcts\" (UID: \"23dafef9-64c0-4b93-b9e6-4c5d00a94a37\") " pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-ggcts" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:10.999691 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b27fg\" (UniqueName: \"kubernetes.io/projected/a89d3329-f603-48e0-a781-726e723ddba2-kube-api-access-b27fg\") pod \"ironic-operator-controller-manager-9c5c78d49-fpcxc\" (UID: \"a89d3329-f603-48e0-a781-726e723ddba2\") " pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-fpcxc" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:10.999741 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6v4gb\" (UniqueName: \"kubernetes.io/projected/de604670-9b37-401b-a41e-de24f939ddfa-kube-api-access-6v4gb\") pod \"infra-operator-controller-manager-656bcbd775-cdp57\" (UID: \"de604670-9b37-401b-a41e-de24f939ddfa\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-cdp57" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:10.999793 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7lw8\" (UniqueName: \"kubernetes.io/projected/c4cff4cc-1db4-4c49-821e-c24204a45224-kube-api-access-z7lw8\") pod \"keystone-operator-controller-manager-55b6b7c7b8-nw27r\" (UID: \"c4cff4cc-1db4-4c49-821e-c24204a45224\") " pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-nw27r" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:10.999829 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52pcs\" (UniqueName: \"kubernetes.io/projected/be62f268-d474-44c4-847d-3f43f4e3b724-kube-api-access-52pcs\") pod \"glance-operator-controller-manager-84b9b84486-rn562\" (UID: \"be62f268-d474-44c4-847d-3f43f4e3b724\") " pod="openstack-operators/glance-operator-controller-manager-84b9b84486-rn562" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:10.999857 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbqnl\" (UniqueName: \"kubernetes.io/projected/f5eb77ba-26c3-431f-88aa-43c9ca4e1137-kube-api-access-cbqnl\") pod \"horizon-operator-controller-manager-7ffbcb7588-k9gct\" (UID: \"f5eb77ba-26c3-431f-88aa-43c9ca4e1137\") " pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-k9gct" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:10.999890 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/de604670-9b37-401b-a41e-de24f939ddfa-cert\") pod \"infra-operator-controller-manager-656bcbd775-cdp57\" (UID: \"de604670-9b37-401b-a41e-de24f939ddfa\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-cdp57" Oct 13 13:20:11 crc 
Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.019970 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-5df598886f-nrqjb"]
Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.021271 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5df598886f-nrqjb"
Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.023712 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5f67fbc655-wmg4z"]
Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.024893 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-flhrp"
Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.029277 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-79d585cb66-cmd4z"]
Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.038085 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-f4pdn"
Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.047914 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5df598886f-nrqjb"]
Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.050584 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6v4gb\" (UniqueName: \"kubernetes.io/projected/de604670-9b37-401b-a41e-de24f939ddfa-kube-api-access-6v4gb\") pod \"infra-operator-controller-manager-656bcbd775-cdp57\" (UID: \"de604670-9b37-401b-a41e-de24f939ddfa\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-cdp57"
Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.061235 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vv7wl\" (UniqueName: \"kubernetes.io/projected/23dafef9-64c0-4b93-b9e6-4c5d00a94a37-kube-api-access-vv7wl\") pod \"heat-operator-controller-manager-858f76bbdd-ggcts\" (UID: \"23dafef9-64c0-4b93-b9e6-4c5d00a94a37\") " pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-ggcts"
Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.061498 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbqnl\" (UniqueName: \"kubernetes.io/projected/f5eb77ba-26c3-431f-88aa-43c9ca4e1137-kube-api-access-cbqnl\") pod \"horizon-operator-controller-manager-7ffbcb7588-k9gct\" (UID: \"f5eb77ba-26c3-431f-88aa-43c9ca4e1137\") " pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-k9gct"
Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.061613 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52pcs\" (UniqueName: \"kubernetes.io/projected/be62f268-d474-44c4-847d-3f43f4e3b724-kube-api-access-52pcs\") pod \"glance-operator-controller-manager-84b9b84486-rn562\" (UID: \"be62f268-d474-44c4-847d-3f43f4e3b724\") " pod="openstack-operators/glance-operator-controller-manager-84b9b84486-rn562"
pod="openstack-operators/glance-operator-controller-manager-84b9b84486-rn562" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.067388 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-hhc52"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.068394 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-hhc52" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.076243 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-2s2t4" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.076939 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-ggcts" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.081719 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-hhc52"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.098141 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-79df5fb58c-rjcff"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.099406 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-rjcff" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.099942 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-k9gct" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.102775 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-4rl8w" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.114599 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-79df5fb58c-rjcff"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.127392 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.130733 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.135229 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b27fg\" (UniqueName: \"kubernetes.io/projected/a89d3329-f603-48e0-a781-726e723ddba2-kube-api-access-b27fg\") pod \"ironic-operator-controller-manager-9c5c78d49-fpcxc\" (UID: \"a89d3329-f603-48e0-a781-726e723ddba2\") " pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-fpcxc" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.135303 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7lw8\" (UniqueName: \"kubernetes.io/projected/c4cff4cc-1db4-4c49-821e-c24204a45224-kube-api-access-z7lw8\") pod \"keystone-operator-controller-manager-55b6b7c7b8-nw27r\" (UID: \"c4cff4cc-1db4-4c49-821e-c24204a45224\") " pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-nw27r" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.135329 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmclz\" (UniqueName: \"kubernetes.io/projected/f9b4d8c9-c763-42ce-bb43-3661fd211396-kube-api-access-pmclz\") pod \"octavia-operator-controller-manager-69fdcfc5f5-hhc52\" (UID: \"f9b4d8c9-c763-42ce-bb43-3661fd211396\") " pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-hhc52" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.135358 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6n4m5\" (UniqueName: \"kubernetes.io/projected/fc06efe2-69bb-4b8c-bade-ac2f86aedf8f-kube-api-access-6n4m5\") pod \"ovn-operator-controller-manager-79df5fb58c-rjcff\" (UID: \"fc06efe2-69bb-4b8c-bade-ac2f86aedf8f\") " pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-rjcff" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.135378 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pprm5\" (UniqueName: \"kubernetes.io/projected/d99f99c5-94dd-4239-a410-b4983e91974f-kube-api-access-pprm5\") pod \"mariadb-operator-controller-manager-f9fb45f8f-l2r6f\" (UID: \"d99f99c5-94dd-4239-a410-b4983e91974f\") " pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-l2r6f" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.135400 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwqm9\" (UniqueName: \"kubernetes.io/projected/210a7413-8bab-4428-adc6-be6bb0037a24-kube-api-access-mwqm9\") pod \"neutron-operator-controller-manager-79d585cb66-cmd4z\" (UID: \"210a7413-8bab-4428-adc6-be6bb0037a24\") " pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-cmd4z" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.135428 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbxjx\" (UniqueName: \"kubernetes.io/projected/d6bea786-6aa7-4ad2-ae0f-5b61f4bde746-kube-api-access-lbxjx\") pod \"nova-operator-controller-manager-5df598886f-nrqjb\" (UID: \"d6bea786-6aa7-4ad2-ae0f-5b61f4bde746\") " pod="openstack-operators/nova-operator-controller-manager-5df598886f-nrqjb" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.135461 4684 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz62c\" (UniqueName: \"kubernetes.io/projected/738e9416-e337-4ae6-89e3-40ce9e1843b6-kube-api-access-pz62c\") pod \"manila-operator-controller-manager-5f67fbc655-wmg4z\" (UID: \"738e9416-e337-4ae6-89e3-40ce9e1843b6\") " pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-wmg4z" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.136279 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-cdp57" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.137226 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.139236 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-cvjgl" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.149761 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.151857 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.155261 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-68b6c87b68-l4m6r"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.156380 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-l4m6r" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.165699 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-vxgnc" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.165933 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-hgpgm" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.170558 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b27fg\" (UniqueName: \"kubernetes.io/projected/a89d3329-f603-48e0-a781-726e723ddba2-kube-api-access-b27fg\") pod \"ironic-operator-controller-manager-9c5c78d49-fpcxc\" (UID: \"a89d3329-f603-48e0-a781-726e723ddba2\") " pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-fpcxc" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.171061 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7lw8\" (UniqueName: \"kubernetes.io/projected/c4cff4cc-1db4-4c49-821e-c24204a45224-kube-api-access-z7lw8\") pod \"keystone-operator-controller-manager-55b6b7c7b8-nw27r\" (UID: \"c4cff4cc-1db4-4c49-821e-c24204a45224\") " pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-nw27r" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.189636 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.199819 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-nw27r" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.210703 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.226060 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-68b6c87b68-l4m6r"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.238403 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmclz\" (UniqueName: \"kubernetes.io/projected/f9b4d8c9-c763-42ce-bb43-3661fd211396-kube-api-access-pmclz\") pod \"octavia-operator-controller-manager-69fdcfc5f5-hhc52\" (UID: \"f9b4d8c9-c763-42ce-bb43-3661fd211396\") " pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-hhc52" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.238439 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6n4m5\" (UniqueName: \"kubernetes.io/projected/fc06efe2-69bb-4b8c-bade-ac2f86aedf8f-kube-api-access-6n4m5\") pod \"ovn-operator-controller-manager-79df5fb58c-rjcff\" (UID: \"fc06efe2-69bb-4b8c-bade-ac2f86aedf8f\") " pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-rjcff" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.238460 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pprm5\" (UniqueName: \"kubernetes.io/projected/d99f99c5-94dd-4239-a410-b4983e91974f-kube-api-access-pprm5\") pod \"mariadb-operator-controller-manager-f9fb45f8f-l2r6f\" (UID: \"d99f99c5-94dd-4239-a410-b4983e91974f\") " pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-l2r6f" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.238485 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwqm9\" (UniqueName: \"kubernetes.io/projected/210a7413-8bab-4428-adc6-be6bb0037a24-kube-api-access-mwqm9\") pod \"neutron-operator-controller-manager-79d585cb66-cmd4z\" (UID: \"210a7413-8bab-4428-adc6-be6bb0037a24\") " pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-cmd4z" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.238512 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbxjx\" (UniqueName: \"kubernetes.io/projected/d6bea786-6aa7-4ad2-ae0f-5b61f4bde746-kube-api-access-lbxjx\") pod \"nova-operator-controller-manager-5df598886f-nrqjb\" (UID: \"d6bea786-6aa7-4ad2-ae0f-5b61f4bde746\") " pod="openstack-operators/nova-operator-controller-manager-5df598886f-nrqjb" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.238540 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz62c\" (UniqueName: \"kubernetes.io/projected/738e9416-e337-4ae6-89e3-40ce9e1843b6-kube-api-access-pz62c\") pod \"manila-operator-controller-manager-5f67fbc655-wmg4z\" (UID: \"738e9416-e337-4ae6-89e3-40ce9e1843b6\") " pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-wmg4z" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.258667 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-6mg9x" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.267772 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-67cfc6749b-jjkmq"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.268732 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-jjkmq" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.272591 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-dzvkz" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.273430 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmclz\" (UniqueName: \"kubernetes.io/projected/f9b4d8c9-c763-42ce-bb43-3661fd211396-kube-api-access-pmclz\") pod \"octavia-operator-controller-manager-69fdcfc5f5-hhc52\" (UID: \"f9b4d8c9-c763-42ce-bb43-3661fd211396\") " pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-hhc52" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.278436 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwqm9\" (UniqueName: \"kubernetes.io/projected/210a7413-8bab-4428-adc6-be6bb0037a24-kube-api-access-mwqm9\") pod \"neutron-operator-controller-manager-79d585cb66-cmd4z\" (UID: \"210a7413-8bab-4428-adc6-be6bb0037a24\") " pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-cmd4z" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.280279 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pprm5\" (UniqueName: \"kubernetes.io/projected/d99f99c5-94dd-4239-a410-b4983e91974f-kube-api-access-pprm5\") pod \"mariadb-operator-controller-manager-f9fb45f8f-l2r6f\" (UID: \"d99f99c5-94dd-4239-a410-b4983e91974f\") " pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-l2r6f" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.285174 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-67cfc6749b-jjkmq"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.288670 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6n4m5\" (UniqueName: \"kubernetes.io/projected/fc06efe2-69bb-4b8c-bade-ac2f86aedf8f-kube-api-access-6n4m5\") pod \"ovn-operator-controller-manager-79df5fb58c-rjcff\" (UID: \"fc06efe2-69bb-4b8c-bade-ac2f86aedf8f\") " pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-rjcff" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.296048 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5458f77c4-9cknq"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.297076 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5458f77c4-9cknq" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.302616 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-77hdq" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.304108 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbxjx\" (UniqueName: \"kubernetes.io/projected/d6bea786-6aa7-4ad2-ae0f-5b61f4bde746-kube-api-access-lbxjx\") pod \"nova-operator-controller-manager-5df598886f-nrqjb\" (UID: \"d6bea786-6aa7-4ad2-ae0f-5b61f4bde746\") " pod="openstack-operators/nova-operator-controller-manager-5df598886f-nrqjb" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.308031 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5458f77c4-9cknq"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.316050 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz62c\" (UniqueName: \"kubernetes.io/projected/738e9416-e337-4ae6-89e3-40ce9e1843b6-kube-api-access-pz62c\") pod \"manila-operator-controller-manager-5f67fbc655-wmg4z\" (UID: \"738e9416-e337-4ae6-89e3-40ce9e1843b6\") " pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-wmg4z" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.339280 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rwmn\" (UniqueName: \"kubernetes.io/projected/77101b92-a0b5-4715-b426-918edba4833c-kube-api-access-2rwmn\") pod \"swift-operator-controller-manager-db6d7f97b-zbvph\" (UID: \"77101b92-a0b5-4715-b426-918edba4833c\") " pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.339410 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8-cert\") pod \"openstack-baremetal-operator-controller-manager-55b7d44848pz9s5\" (UID: \"a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.339434 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8snd\" (UniqueName: \"kubernetes.io/projected/a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8-kube-api-access-n8snd\") pod \"openstack-baremetal-operator-controller-manager-55b7d44848pz9s5\" (UID: \"a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.339469 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br6l6\" (UniqueName: \"kubernetes.io/projected/1ea7dea4-73ae-4444-a1e8-9704cb1f32e5-kube-api-access-br6l6\") pod \"placement-operator-controller-manager-68b6c87b68-l4m6r\" (UID: \"1ea7dea4-73ae-4444-a1e8-9704cb1f32e5\") " pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-l4m6r" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.342741 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-rn562" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.373583 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.377038 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.379549 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-zbxxn" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.385341 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-l2r6f" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.395956 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-wmg4z" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.410659 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-cmd4z" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.449080 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8-cert\") pod \"openstack-baremetal-operator-controller-manager-55b7d44848pz9s5\" (UID: \"a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.449142 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8snd\" (UniqueName: \"kubernetes.io/projected/a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8-kube-api-access-n8snd\") pod \"openstack-baremetal-operator-controller-manager-55b7d44848pz9s5\" (UID: \"a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.449207 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br6l6\" (UniqueName: \"kubernetes.io/projected/1ea7dea4-73ae-4444-a1e8-9704cb1f32e5-kube-api-access-br6l6\") pod \"placement-operator-controller-manager-68b6c87b68-l4m6r\" (UID: \"1ea7dea4-73ae-4444-a1e8-9704cb1f32e5\") " pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-l4m6r" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.449300 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rwmn\" (UniqueName: \"kubernetes.io/projected/77101b92-a0b5-4715-b426-918edba4833c-kube-api-access-2rwmn\") pod \"swift-operator-controller-manager-db6d7f97b-zbvph\" (UID: \"77101b92-a0b5-4715-b426-918edba4833c\") " pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.449415 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jml7w\" (UniqueName: \"kubernetes.io/projected/34fad128-5332-4209-ba15-19cc84ccedb6-kube-api-access-jml7w\") pod 
\"test-operator-controller-manager-5458f77c4-9cknq\" (UID: \"34fad128-5332-4209-ba15-19cc84ccedb6\") " pod="openstack-operators/test-operator-controller-manager-5458f77c4-9cknq" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.449438 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhbv8\" (UniqueName: \"kubernetes.io/projected/8fb7300d-97a2-4116-bc41-da6d686a12a1-kube-api-access-qhbv8\") pod \"telemetry-operator-controller-manager-67cfc6749b-jjkmq\" (UID: \"8fb7300d-97a2-4116-bc41-da6d686a12a1\") " pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-jjkmq" Oct 13 13:20:11 crc kubenswrapper[4684]: E1013 13:20:11.449674 4684 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 13 13:20:11 crc kubenswrapper[4684]: E1013 13:20:11.449732 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8-cert podName:a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8 nodeName:}" failed. No retries permitted until 2025-10-13 13:20:11.949708731 +0000 UTC m=+766.517092811 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8-cert") pod "openstack-baremetal-operator-controller-manager-55b7d44848pz9s5" (UID: "a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.456937 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.462053 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-fpcxc" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.468312 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5df598886f-nrqjb" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.475067 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.476437 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.478331 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.479465 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-c2m6s" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.480655 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.482709 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br6l6\" (UniqueName: \"kubernetes.io/projected/1ea7dea4-73ae-4444-a1e8-9704cb1f32e5-kube-api-access-br6l6\") pod \"placement-operator-controller-manager-68b6c87b68-l4m6r\" (UID: \"1ea7dea4-73ae-4444-a1e8-9704cb1f32e5\") " pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-l4m6r" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.483064 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8snd\" (UniqueName: \"kubernetes.io/projected/a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8-kube-api-access-n8snd\") pod \"openstack-baremetal-operator-controller-manager-55b7d44848pz9s5\" (UID: \"a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.483365 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rwmn\" (UniqueName: \"kubernetes.io/projected/77101b92-a0b5-4715-b426-918edba4833c-kube-api-access-2rwmn\") pod \"swift-operator-controller-manager-db6d7f97b-zbvph\" (UID: \"77101b92-a0b5-4715-b426-918edba4833c\") " pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.483446 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-hhc52" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.501235 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-rjcff" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.511428 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2bq8n"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.512528 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2bq8n" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.516237 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-x4mfx" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.525997 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2bq8n"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.542193 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.549804 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7b7fb68549-fb6dz"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.550555 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nlpq\" (UniqueName: \"kubernetes.io/projected/ba30b561-090d-4067-98e4-215b0c717ce2-kube-api-access-8nlpq\") pod \"watcher-operator-controller-manager-7f554bff7b-52z6n\" (UID: \"ba30b561-090d-4067-98e4-215b0c717ce2\") " pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.550741 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhbv8\" (UniqueName: \"kubernetes.io/projected/8fb7300d-97a2-4116-bc41-da6d686a12a1-kube-api-access-qhbv8\") pod \"telemetry-operator-controller-manager-67cfc6749b-jjkmq\" (UID: \"8fb7300d-97a2-4116-bc41-da6d686a12a1\") " pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-jjkmq" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.550766 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jml7w\" (UniqueName: \"kubernetes.io/projected/34fad128-5332-4209-ba15-19cc84ccedb6-kube-api-access-jml7w\") pod \"test-operator-controller-manager-5458f77c4-9cknq\" (UID: \"34fad128-5332-4209-ba15-19cc84ccedb6\") " pod="openstack-operators/test-operator-controller-manager-5458f77c4-9cknq" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.554418 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-l4m6r" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.573137 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhbv8\" (UniqueName: \"kubernetes.io/projected/8fb7300d-97a2-4116-bc41-da6d686a12a1-kube-api-access-qhbv8\") pod \"telemetry-operator-controller-manager-67cfc6749b-jjkmq\" (UID: \"8fb7300d-97a2-4116-bc41-da6d686a12a1\") " pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-jjkmq" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.584230 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jml7w\" (UniqueName: \"kubernetes.io/projected/34fad128-5332-4209-ba15-19cc84ccedb6-kube-api-access-jml7w\") pod \"test-operator-controller-manager-5458f77c4-9cknq\" (UID: \"34fad128-5332-4209-ba15-19cc84ccedb6\") " pod="openstack-operators/test-operator-controller-manager-5458f77c4-9cknq" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.651732 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dr7vc\" (UniqueName: \"kubernetes.io/projected/495f3276-fe48-4709-9e4e-2e57fbf3a5a6-kube-api-access-dr7vc\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-2bq8n\" (UID: \"495f3276-fe48-4709-9e4e-2e57fbf3a5a6\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2bq8n" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.651817 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nlpq\" (UniqueName: \"kubernetes.io/projected/ba30b561-090d-4067-98e4-215b0c717ce2-kube-api-access-8nlpq\") pod \"watcher-operator-controller-manager-7f554bff7b-52z6n\" (UID: \"ba30b561-090d-4067-98e4-215b0c717ce2\") " pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.651846 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a55aa929-2380-4b40-8d61-a9c00ea48536-cert\") pod \"openstack-operator-controller-manager-7fb8c88b76-kqqbz\" (UID: \"a55aa929-2380-4b40-8d61-a9c00ea48536\") " pod="openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.652533 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5bvq\" (UniqueName: \"kubernetes.io/projected/a55aa929-2380-4b40-8d61-a9c00ea48536-kube-api-access-h5bvq\") pod \"openstack-operator-controller-manager-7fb8c88b76-kqqbz\" (UID: \"a55aa929-2380-4b40-8d61-a9c00ea48536\") " pod="openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.678041 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-jjkmq" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.705836 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nlpq\" (UniqueName: \"kubernetes.io/projected/ba30b561-090d-4067-98e4-215b0c717ce2-kube-api-access-8nlpq\") pod \"watcher-operator-controller-manager-7f554bff7b-52z6n\" (UID: \"ba30b561-090d-4067-98e4-215b0c717ce2\") " pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.708249 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5458f77c4-9cknq" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.754613 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dr7vc\" (UniqueName: \"kubernetes.io/projected/495f3276-fe48-4709-9e4e-2e57fbf3a5a6-kube-api-access-dr7vc\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-2bq8n\" (UID: \"495f3276-fe48-4709-9e4e-2e57fbf3a5a6\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2bq8n" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.754690 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a55aa929-2380-4b40-8d61-a9c00ea48536-cert\") pod \"openstack-operator-controller-manager-7fb8c88b76-kqqbz\" (UID: \"a55aa929-2380-4b40-8d61-a9c00ea48536\") " pod="openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.754756 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5bvq\" (UniqueName: \"kubernetes.io/projected/a55aa929-2380-4b40-8d61-a9c00ea48536-kube-api-access-h5bvq\") pod \"openstack-operator-controller-manager-7fb8c88b76-kqqbz\" (UID: \"a55aa929-2380-4b40-8d61-a9c00ea48536\") " pod="openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz" Oct 13 13:20:11 crc kubenswrapper[4684]: E1013 13:20:11.755267 4684 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 13 13:20:11 crc kubenswrapper[4684]: E1013 13:20:11.755346 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a55aa929-2380-4b40-8d61-a9c00ea48536-cert podName:a55aa929-2380-4b40-8d61-a9c00ea48536 nodeName:}" failed. No retries permitted until 2025-10-13 13:20:12.255327251 +0000 UTC m=+766.822711321 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a55aa929-2380-4b40-8d61-a9c00ea48536-cert") pod "openstack-operator-controller-manager-7fb8c88b76-kqqbz" (UID: "a55aa929-2380-4b40-8d61-a9c00ea48536") : secret "webhook-server-cert" not found Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.777463 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5bvq\" (UniqueName: \"kubernetes.io/projected/a55aa929-2380-4b40-8d61-a9c00ea48536-kube-api-access-h5bvq\") pod \"openstack-operator-controller-manager-7fb8c88b76-kqqbz\" (UID: \"a55aa929-2380-4b40-8d61-a9c00ea48536\") " pod="openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.783564 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dr7vc\" (UniqueName: \"kubernetes.io/projected/495f3276-fe48-4709-9e4e-2e57fbf3a5a6-kube-api-access-dr7vc\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-2bq8n\" (UID: \"495f3276-fe48-4709-9e4e-2e57fbf3a5a6\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2bq8n" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.784286 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.798066 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-85d5d9dd78-flhrp"] Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.845743 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2bq8n" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.959822 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8-cert\") pod \"openstack-baremetal-operator-controller-manager-55b7d44848pz9s5\" (UID: \"a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5" Oct 13 13:20:11 crc kubenswrapper[4684]: I1013 13:20:11.971769 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8-cert\") pod \"openstack-baremetal-operator-controller-manager-55b7d44848pz9s5\" (UID: \"a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5" Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.127053 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5" Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.265020 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a55aa929-2380-4b40-8d61-a9c00ea48536-cert\") pod \"openstack-operator-controller-manager-7fb8c88b76-kqqbz\" (UID: \"a55aa929-2380-4b40-8d61-a9c00ea48536\") " pod="openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz" Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.267770 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a55aa929-2380-4b40-8d61-a9c00ea48536-cert\") pod \"openstack-operator-controller-manager-7fb8c88b76-kqqbz\" (UID: \"a55aa929-2380-4b40-8d61-a9c00ea48536\") " pod="openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz" Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.318196 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-858f76bbdd-ggcts"] Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.326700 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7ffbcb7588-k9gct"] Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.343554 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-656bcbd775-cdp57"] Oct 13 13:20:12 crc kubenswrapper[4684]: W1013 13:20:12.352560 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde604670_9b37_401b_a41e_de24f939ddfa.slice/crio-a054429862448c55fcaddbf2d8358816189f10cd383df43f1ccccfeb1e8347a1 WatchSource:0}: Error finding container a054429862448c55fcaddbf2d8358816189f10cd383df43f1ccccfeb1e8347a1: Status 404 returned error can't find the container with id a054429862448c55fcaddbf2d8358816189f10cd383df43f1ccccfeb1e8347a1 Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.399083 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-flhrp" event={"ID":"67029ab5-dc79-4300-acc7-2e4ab2115809","Type":"ContainerStarted","Data":"15bfd3f3b1d598b9881aa785c6e1ded8808f981be2ee38a3fb9422d099ee21e1"} Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.400513 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-cdp57" event={"ID":"de604670-9b37-401b-a41e-de24f939ddfa","Type":"ContainerStarted","Data":"a054429862448c55fcaddbf2d8358816189f10cd383df43f1ccccfeb1e8347a1"} Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.402152 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-fb6dz" event={"ID":"5db0ac5c-8b11-488c-8be7-14b040ddee3b","Type":"ContainerStarted","Data":"9ed4a7050adbaebc4fe923f97b8a7bfb7948033046396dc8e7603b9339688647"} Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.403360 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-ggcts" event={"ID":"23dafef9-64c0-4b93-b9e6-4c5d00a94a37","Type":"ContainerStarted","Data":"9c6f9ef7eb8b9d1570e7d80c12f906057ff368d4534e324516ea5347610fec7f"} Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.404423 4684 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-k9gct" event={"ID":"f5eb77ba-26c3-431f-88aa-43c9ca4e1137","Type":"ContainerStarted","Data":"927f064e395523f8a0c1da60e44d4ffbc73f0f00855a4bb6999a60d605bd91d2"} Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.430638 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz" Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.872516 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-658bdf4b74-6mg9x"] Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.876144 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-9c5c78d49-fpcxc"] Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.911627 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-79d585cb66-cmd4z"] Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.917098 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-hhc52"] Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.924149 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84b9b84486-rn562"] Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.928063 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5df598886f-nrqjb"] Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.932339 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-79df5fb58c-rjcff"] Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.935683 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5f67fbc655-wmg4z"] Oct 13 13:20:12 crc kubenswrapper[4684]: W1013 13:20:12.939681 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda89d3329_f603_48e0_a781_726e723ddba2.slice/crio-847df4947e7559e7e6131967d03ccf0aa1bf2ba40683f6f270de09dfef837aa1 WatchSource:0}: Error finding container 847df4947e7559e7e6131967d03ccf0aa1bf2ba40683f6f270de09dfef837aa1: Status 404 returned error can't find the container with id 847df4947e7559e7e6131967d03ccf0aa1bf2ba40683f6f270de09dfef837aa1 Oct 13 13:20:12 crc kubenswrapper[4684]: W1013 13:20:12.940267 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6bea786_6aa7_4ad2_ae0f_5b61f4bde746.slice/crio-d5b24c7d30cfcff1b812aedf426b918a20ad9bbe43822a8be6b41f0aa3206930 WatchSource:0}: Error finding container d5b24c7d30cfcff1b812aedf426b918a20ad9bbe43822a8be6b41f0aa3206930: Status 404 returned error can't find the container with id d5b24c7d30cfcff1b812aedf426b918a20ad9bbe43822a8be6b41f0aa3206930 Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.942638 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-l2r6f"] Oct 13 13:20:12 crc kubenswrapper[4684]: W1013 13:20:12.943408 4684 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc06efe2_69bb_4b8c_bade_ac2f86aedf8f.slice/crio-9fb0cffe00c21f3c22adb638aeb5c3649e0455ebc88c67231d45f9cfb5ea75c5 WatchSource:0}: Error finding container 9fb0cffe00c21f3c22adb638aeb5c3649e0455ebc88c67231d45f9cfb5ea75c5: Status 404 returned error can't find the container with id 9fb0cffe00c21f3c22adb638aeb5c3649e0455ebc88c67231d45f9cfb5ea75c5 Oct 13 13:20:12 crc kubenswrapper[4684]: W1013 13:20:12.943813 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9b4d8c9_c763_42ce_bb43_3661fd211396.slice/crio-52b40cd8acfe635f162af80357ee3e744531b8f1ddbb239a5bbaac79214f8f44 WatchSource:0}: Error finding container 52b40cd8acfe635f162af80357ee3e744531b8f1ddbb239a5bbaac79214f8f44: Status 404 returned error can't find the container with id 52b40cd8acfe635f162af80357ee3e744531b8f1ddbb239a5bbaac79214f8f44 Oct 13 13:20:12 crc kubenswrapper[4684]: I1013 13:20:12.948208 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-nw27r"] Oct 13 13:20:12 crc kubenswrapper[4684]: W1013 13:20:12.960549 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod210a7413_8bab_4428_adc6_be6bb0037a24.slice/crio-840155a0d6e6e00e5908fd83e715cf35e62ccf85eeefd5d6a2a2c0729e6915d9 WatchSource:0}: Error finding container 840155a0d6e6e00e5908fd83e715cf35e62ccf85eeefd5d6a2a2c0729e6915d9: Status 404 returned error can't find the container with id 840155a0d6e6e00e5908fd83e715cf35e62ccf85eeefd5d6a2a2c0729e6915d9 Oct 13 13:20:12 crc kubenswrapper[4684]: W1013 13:20:12.964335 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod738e9416_e337_4ae6_89e3_40ce9e1843b6.slice/crio-c10cd90c9b62c6c6dbc7ca29a1542af7985f0a78dff191a853917b596bf6ed8c WatchSource:0}: Error finding container c10cd90c9b62c6c6dbc7ca29a1542af7985f0a78dff191a853917b596bf6ed8c: Status 404 returned error can't find the container with id c10cd90c9b62c6c6dbc7ca29a1542af7985f0a78dff191a853917b596bf6ed8c Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.246528 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n"] Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.269606 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-67cfc6749b-jjkmq"] Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.273416 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph"] Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.295242 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2bq8n"] Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.297408 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5458f77c4-9cknq"] Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.354182 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-68b6c87b68-l4m6r"] Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.412396 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/glance-operator-controller-manager-84b9b84486-rn562" event={"ID":"be62f268-d474-44c4-847d-3f43f4e3b724","Type":"ContainerStarted","Data":"b6a504b5486442cc7131cfcdfd11c359aaa35058f7b1ab7a4025d121fe38feb2"} Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.415551 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-rjcff" event={"ID":"fc06efe2-69bb-4b8c-bade-ac2f86aedf8f","Type":"ContainerStarted","Data":"9fb0cffe00c21f3c22adb638aeb5c3649e0455ebc88c67231d45f9cfb5ea75c5"} Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.418008 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-cmd4z" event={"ID":"210a7413-8bab-4428-adc6-be6bb0037a24","Type":"ContainerStarted","Data":"840155a0d6e6e00e5908fd83e715cf35e62ccf85eeefd5d6a2a2c0729e6915d9"} Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.419216 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-6mg9x" event={"ID":"b3b72bfd-179d-4a2c-bbcf-eb318658886d","Type":"ContainerStarted","Data":"7a9fb6a7b725f673fd9d097ecb368c5a916abb779fee83955de2724428a39401"} Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.421767 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-fpcxc" event={"ID":"a89d3329-f603-48e0-a781-726e723ddba2","Type":"ContainerStarted","Data":"847df4947e7559e7e6131967d03ccf0aa1bf2ba40683f6f270de09dfef837aa1"} Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.426030 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5df598886f-nrqjb" event={"ID":"d6bea786-6aa7-4ad2-ae0f-5b61f4bde746","Type":"ContainerStarted","Data":"d5b24c7d30cfcff1b812aedf426b918a20ad9bbe43822a8be6b41f0aa3206930"} Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.427520 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-hhc52" event={"ID":"f9b4d8c9-c763-42ce-bb43-3661fd211396","Type":"ContainerStarted","Data":"52b40cd8acfe635f162af80357ee3e744531b8f1ddbb239a5bbaac79214f8f44"} Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.428742 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-l2r6f" event={"ID":"d99f99c5-94dd-4239-a410-b4983e91974f","Type":"ContainerStarted","Data":"0c6d2806b30ba4b56f8de23f71614e61ebbd18d51a4f010fe8d65e341698f76f"} Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.429569 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-nw27r" event={"ID":"c4cff4cc-1db4-4c49-821e-c24204a45224","Type":"ContainerStarted","Data":"5ca3e69dc906af7b4b7807ba2aaf5bd69648934abbd103380735de928f27e454"} Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.430428 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-wmg4z" event={"ID":"738e9416-e337-4ae6-89e3-40ce9e1843b6","Type":"ContainerStarted","Data":"c10cd90c9b62c6c6dbc7ca29a1542af7985f0a78dff191a853917b596bf6ed8c"} Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.471588 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz"] Oct 13 13:20:13 crc kubenswrapper[4684]: I1013 13:20:13.505433 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5"] Oct 13 13:20:14 crc kubenswrapper[4684]: W1013 13:20:14.291664 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77101b92_a0b5_4715_b426_918edba4833c.slice/crio-45eb648769609ce17fd04728f510105a312b4f53e2a0f33b0b59e459c4bb8724 WatchSource:0}: Error finding container 45eb648769609ce17fd04728f510105a312b4f53e2a0f33b0b59e459c4bb8724: Status 404 returned error can't find the container with id 45eb648769609ce17fd04728f510105a312b4f53e2a0f33b0b59e459c4bb8724 Oct 13 13:20:14 crc kubenswrapper[4684]: W1013 13:20:14.300320 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod495f3276_fe48_4709_9e4e_2e57fbf3a5a6.slice/crio-d8575d243f5fd7475dac587eb7ff6883e2ced8804ec5d03900a3cecc6255b738 WatchSource:0}: Error finding container d8575d243f5fd7475dac587eb7ff6883e2ced8804ec5d03900a3cecc6255b738: Status 404 returned error can't find the container with id d8575d243f5fd7475dac587eb7ff6883e2ced8804ec5d03900a3cecc6255b738 Oct 13 13:20:14 crc kubenswrapper[4684]: I1013 13:20:14.436291 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-jjkmq" event={"ID":"8fb7300d-97a2-4116-bc41-da6d686a12a1","Type":"ContainerStarted","Data":"48a7bc2c8ccca4d39876169b6f4563febd2e2d568052e36a9ce7c0a74abfcf49"} Oct 13 13:20:14 crc kubenswrapper[4684]: I1013 13:20:14.437664 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5458f77c4-9cknq" event={"ID":"34fad128-5332-4209-ba15-19cc84ccedb6","Type":"ContainerStarted","Data":"5cb7d3bd196d81ebe96dbdf93a38253e31ec57392fd1d574b7bdde49e14d2c3a"} Oct 13 13:20:14 crc kubenswrapper[4684]: I1013 13:20:14.439387 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n" event={"ID":"ba30b561-090d-4067-98e4-215b0c717ce2","Type":"ContainerStarted","Data":"3b5339cecb0dbd8e197e8a750f5938ca30f0368e78e1df478ddc18a9d0420922"} Oct 13 13:20:14 crc kubenswrapper[4684]: I1013 13:20:14.441122 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph" event={"ID":"77101b92-a0b5-4715-b426-918edba4833c","Type":"ContainerStarted","Data":"45eb648769609ce17fd04728f510105a312b4f53e2a0f33b0b59e459c4bb8724"} Oct 13 13:20:14 crc kubenswrapper[4684]: I1013 13:20:14.441944 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2bq8n" event={"ID":"495f3276-fe48-4709-9e4e-2e57fbf3a5a6","Type":"ContainerStarted","Data":"d8575d243f5fd7475dac587eb7ff6883e2ced8804ec5d03900a3cecc6255b738"} Oct 13 13:20:15 crc kubenswrapper[4684]: I1013 13:20:15.458296 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz" event={"ID":"a55aa929-2380-4b40-8d61-a9c00ea48536","Type":"ContainerStarted","Data":"24d76cfaee3326ce0b69697336914e1c447f4fd79c2684e05d7a0e6fd130df1b"} Oct 13 13:20:15 crc kubenswrapper[4684]: I1013 13:20:15.460226 4684 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-l4m6r" event={"ID":"1ea7dea4-73ae-4444-a1e8-9704cb1f32e5","Type":"ContainerStarted","Data":"cde2b7d77b8ba28d3753931b2f57fa9d16ee43f4d7ab9a7075606e749a16f597"} Oct 13 13:20:15 crc kubenswrapper[4684]: I1013 13:20:15.461810 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5" event={"ID":"a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8","Type":"ContainerStarted","Data":"b7dbd1d670d6a378eac745a31ac84bc8aef156299d5fc697a2d698c8d234d6f6"} Oct 13 13:20:33 crc kubenswrapper[4684]: I1013 13:20:33.016674 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nvrns"] Oct 13 13:20:33 crc kubenswrapper[4684]: I1013 13:20:33.018838 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 13:20:33 crc kubenswrapper[4684]: I1013 13:20:33.030316 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nvrns"] Oct 13 13:20:33 crc kubenswrapper[4684]: I1013 13:20:33.062205 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d395242-0cc1-4f47-958d-ee109434b8d2-utilities\") pod \"redhat-operators-nvrns\" (UID: \"4d395242-0cc1-4f47-958d-ee109434b8d2\") " pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 13:20:33 crc kubenswrapper[4684]: I1013 13:20:33.062296 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d395242-0cc1-4f47-958d-ee109434b8d2-catalog-content\") pod \"redhat-operators-nvrns\" (UID: \"4d395242-0cc1-4f47-958d-ee109434b8d2\") " pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 13:20:33 crc kubenswrapper[4684]: I1013 13:20:33.062470 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp6rq\" (UniqueName: \"kubernetes.io/projected/4d395242-0cc1-4f47-958d-ee109434b8d2-kube-api-access-sp6rq\") pod \"redhat-operators-nvrns\" (UID: \"4d395242-0cc1-4f47-958d-ee109434b8d2\") " pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 13:20:33 crc kubenswrapper[4684]: I1013 13:20:33.163544 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d395242-0cc1-4f47-958d-ee109434b8d2-catalog-content\") pod \"redhat-operators-nvrns\" (UID: \"4d395242-0cc1-4f47-958d-ee109434b8d2\") " pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 13:20:33 crc kubenswrapper[4684]: I1013 13:20:33.163882 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp6rq\" (UniqueName: \"kubernetes.io/projected/4d395242-0cc1-4f47-958d-ee109434b8d2-kube-api-access-sp6rq\") pod \"redhat-operators-nvrns\" (UID: \"4d395242-0cc1-4f47-958d-ee109434b8d2\") " pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 13:20:33 crc kubenswrapper[4684]: I1013 13:20:33.163955 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d395242-0cc1-4f47-958d-ee109434b8d2-utilities\") pod \"redhat-operators-nvrns\" (UID: \"4d395242-0cc1-4f47-958d-ee109434b8d2\") " pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 
13:20:33 crc kubenswrapper[4684]: I1013 13:20:33.164127 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d395242-0cc1-4f47-958d-ee109434b8d2-catalog-content\") pod \"redhat-operators-nvrns\" (UID: \"4d395242-0cc1-4f47-958d-ee109434b8d2\") " pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 13:20:33 crc kubenswrapper[4684]: I1013 13:20:33.164387 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d395242-0cc1-4f47-958d-ee109434b8d2-utilities\") pod \"redhat-operators-nvrns\" (UID: \"4d395242-0cc1-4f47-958d-ee109434b8d2\") " pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 13:20:33 crc kubenswrapper[4684]: I1013 13:20:33.183989 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp6rq\" (UniqueName: \"kubernetes.io/projected/4d395242-0cc1-4f47-958d-ee109434b8d2-kube-api-access-sp6rq\") pod \"redhat-operators-nvrns\" (UID: \"4d395242-0cc1-4f47-958d-ee109434b8d2\") " pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 13:20:33 crc kubenswrapper[4684]: I1013 13:20:33.336135 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 13:20:34 crc kubenswrapper[4684]: E1013 13:20:34.179606 4684 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:4b4a17fe08ce00e375afaaec6a28835f5c1784f03d11c4558376ac04130f3a9e" Oct 13 13:20:34 crc kubenswrapper[4684]: E1013 13:20:34.179862 4684 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:4b4a17fe08ce00e375afaaec6a28835f5c1784f03d11c4558376ac04130f3a9e,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2rwmn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-db6d7f97b-zbvph_openstack-operators(77101b92-a0b5-4715-b426-918edba4833c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 13 13:20:34 crc kubenswrapper[4684]: E1013 13:20:34.845525 4684 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:98a5233f0596591acdf2c6a5838b08be108787cdb6ad1995b2b7886bac0fe6ca"
Oct 13 13:20:34 crc kubenswrapper[4684]: E1013 13:20:34.845956 4684 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:98a5233f0596591acdf2c6a5838b08be108787cdb6ad1995b2b7886bac0fe6ca,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8nlpq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-7f554bff7b-52z6n_openstack-operators(ba30b561-090d-4067-98e4-215b0c717ce2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 13 13:20:35 crc kubenswrapper[4684]: E1013 13:20:35.455704 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph" podUID="77101b92-a0b5-4715-b426-918edba4833c"
Oct 13 13:20:35 crc kubenswrapper[4684]: I1013 13:20:35.538931 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nvrns"]
Oct 13 13:20:35 crc kubenswrapper[4684]: W1013 13:20:35.603130 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d395242_0cc1_4f47_958d_ee109434b8d2.slice/crio-ee95cea1b8d05268bf6fb6123ec5f79cf89028bb49160ba4ea829d286d3d7349 WatchSource:0}: Error finding container ee95cea1b8d05268bf6fb6123ec5f79cf89028bb49160ba4ea829d286d3d7349: Status 404 returned error can't find the container with id ee95cea1b8d05268bf6fb6123ec5f79cf89028bb49160ba4ea829d286d3d7349
Oct 13 13:20:35 crc kubenswrapper[4684]: I1013 13:20:35.624465 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-flhrp" event={"ID":"67029ab5-dc79-4300-acc7-2e4ab2115809","Type":"ContainerStarted","Data":"27f69155e6ab62510b1ee3ab75c6c4beaf086ac1cf9dc0f8526287aedac1235f"}
Oct 13 13:20:35 crc kubenswrapper[4684]: I1013 13:20:35.625958 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nvrns" event={"ID":"4d395242-0cc1-4f47-958d-ee109434b8d2","Type":"ContainerStarted","Data":"ee95cea1b8d05268bf6fb6123ec5f79cf89028bb49160ba4ea829d286d3d7349"}
Oct 13 13:20:35 crc kubenswrapper[4684]: I1013 13:20:35.628294 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-k9gct" event={"ID":"f5eb77ba-26c3-431f-88aa-43c9ca4e1137","Type":"ContainerStarted","Data":"d3c8993810174e6a45bb06703d447108036c9a6f4094a31ab7ef24730a23067e"}
Oct 13 13:20:35 crc kubenswrapper[4684]: I1013 13:20:35.630760 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph" event={"ID":"77101b92-a0b5-4715-b426-918edba4833c","Type":"ContainerStarted","Data":"49ea7435b6dccdd9d1c35ad9a7f853210cecafe2847fb905bbdb02e465943279"}
Oct 13 13:20:35 crc kubenswrapper[4684]: E1013 13:20:35.632278 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:4b4a17fe08ce00e375afaaec6a28835f5c1784f03d11c4558376ac04130f3a9e\\\"\"" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph" podUID="77101b92-a0b5-4715-b426-918edba4833c"
Oct 13 13:20:35 crc kubenswrapper[4684]: I1013 13:20:35.656118 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz" event={"ID":"a55aa929-2380-4b40-8d61-a9c00ea48536","Type":"ContainerStarted","Data":"e252a59d10140803c9bc43494998248bbf2b0aa118386116f7f5d9b94c1b94e0"}
Oct 13 13:20:35 crc kubenswrapper[4684]: I1013 13:20:35.660254 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-fb6dz" event={"ID":"5db0ac5c-8b11-488c-8be7-14b040ddee3b","Type":"ContainerStarted","Data":"4b88bf8bc6f7041944a024e252aab4c03868bae7afa2d0eb6926952395a5fa21"}
Oct 13 13:20:35 crc kubenswrapper[4684]: I1013 13:20:35.665588 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-ggcts" event={"ID":"23dafef9-64c0-4b93-b9e6-4c5d00a94a37","Type":"ContainerStarted","Data":"910bcef2b8cae8d68f7ced517755d750950e7192dedc8a2760c9063164c1fbd4"}
Oct 13 13:20:36 crc kubenswrapper[4684]: E1013 13:20:36.172193 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n" podUID="ba30b561-090d-4067-98e4-215b0c717ce2"
Oct 13 13:20:36 crc kubenswrapper[4684]: I1013 13:20:36.696257 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-rjcff" event={"ID":"fc06efe2-69bb-4b8c-bade-ac2f86aedf8f","Type":"ContainerStarted","Data":"ebaf6a085d89946cc9fbf1aa336d3c5ef84612b0583c600f86ab109a126b9fd0"}
Oct 13 13:20:36 crc kubenswrapper[4684]: I1013 13:20:36.718434 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-hhc52" event={"ID":"f9b4d8c9-c763-42ce-bb43-3661fd211396","Type":"ContainerStarted","Data":"3e1ee4d7c5d940f12ab588ec2a926dec8c0c69c8b944353bfc6e857633589718"}
Oct 13 13:20:36 crc kubenswrapper[4684]: I1013 13:20:36.729429 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-cmd4z" event={"ID":"210a7413-8bab-4428-adc6-be6bb0037a24","Type":"ContainerStarted","Data":"57c7bbf5163ec96d81c16e80e19e892e0807497569ba77e0abcf76ee9937c227"}
Oct 13 13:20:36 crc kubenswrapper[4684]: I1013 13:20:36.730861 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-6mg9x" event={"ID":"b3b72bfd-179d-4a2c-bbcf-eb318658886d","Type":"ContainerStarted","Data":"fcf87020f1b2d7200c931db0966475288dec2ddab0dcdb35e7c55036af229e81"}
Oct 13 13:20:36 crc kubenswrapper[4684]: I1013 13:20:36.731940 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-rn562" event={"ID":"be62f268-d474-44c4-847d-3f43f4e3b724","Type":"ContainerStarted","Data":"05863b340d461e2dfee9b69344a233f531a484cedfabf52ef182ff4fdb246f97"}
Oct 13 13:20:36 crc kubenswrapper[4684]: I1013 13:20:36.767922 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5df598886f-nrqjb" event={"ID":"d6bea786-6aa7-4ad2-ae0f-5b61f4bde746","Type":"ContainerStarted","Data":"304bc02b9b5784314bdfd5111be0f6f39efc019853d574c436117091e4e8c4be"}
Oct 13 13:20:36 crc kubenswrapper[4684]: I1013 13:20:36.855879 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-cdp57" event={"ID":"de604670-9b37-401b-a41e-de24f939ddfa","Type":"ContainerStarted","Data":"d9be442e418d612760f3f8351e25068fe3886c6c947b0653fea8a776654ba50b"}
Oct 13 13:20:36 crc kubenswrapper[4684]: I1013 13:20:36.882218 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-wmg4z" event={"ID":"738e9416-e337-4ae6-89e3-40ce9e1843b6","Type":"ContainerStarted","Data":"74f37afbbe44764b40d7d05bc02005ab1cb85fe44c9d0a9d6f07fe0c98b6c8ed"}
Oct 13 13:20:36 crc kubenswrapper[4684]: I1013 13:20:36.906814 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-fpcxc" event={"ID":"a89d3329-f603-48e0-a781-726e723ddba2","Type":"ContainerStarted","Data":"d55b07c2e868b38a3d5fb6433ad0963d752739a2506daf51d4300a0216b407ec"}
Oct 13 13:20:36 crc kubenswrapper[4684]: I1013 13:20:36.911742 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz" event={"ID":"a55aa929-2380-4b40-8d61-a9c00ea48536","Type":"ContainerStarted","Data":"8dc0926cd185b2c265dac1f9073ba8c553704705cd8f6994478e12106766ed9b"}
Oct 13 13:20:36 crc kubenswrapper[4684]: I1013 13:20:36.912710 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz"
Oct 13 13:20:36 crc kubenswrapper[4684]: I1013 13:20:36.930664 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-jjkmq" event={"ID":"8fb7300d-97a2-4116-bc41-da6d686a12a1","Type":"ContainerStarted","Data":"afce926ea616450c06a191c0f47562485112dcf2803bf2eef553e70e26d588aa"}
Oct 13 13:20:36 crc kubenswrapper[4684]: I1013 13:20:36.948698 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-l4m6r" event={"ID":"1ea7dea4-73ae-4444-a1e8-9704cb1f32e5","Type":"ContainerStarted","Data":"353315064b32a1aed1ebf20de44028c422efe9148779a1dd1311f5f7a92e9c77"}
Oct 13 13:20:36 crc kubenswrapper[4684]: I1013 13:20:36.979162 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-ggcts" event={"ID":"23dafef9-64c0-4b93-b9e6-4c5d00a94a37","Type":"ContainerStarted","Data":"e15c2379965fba5cf493bf6460f9b2531e32201ff6227990851a70403aaccdad"}
Oct 13 13:20:36 crc kubenswrapper[4684]: I1013 13:20:36.980020 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-ggcts"
Oct 13 13:20:36 crc kubenswrapper[4684]: I1013 13:20:36.997377 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n" event={"ID":"ba30b561-090d-4067-98e4-215b0c717ce2","Type":"ContainerStarted","Data":"9f79dfa3fd79a04c8eea63434f79e02c37033f02b6660ab8db10feb933b0e583"}
Oct 13 13:20:36 crc kubenswrapper[4684]: E1013 13:20:36.999662 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:98a5233f0596591acdf2c6a5838b08be108787cdb6ad1995b2b7886bac0fe6ca\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n" podUID="ba30b561-090d-4067-98e4-215b0c717ce2"
Oct 13 13:20:37 crc kubenswrapper[4684]: I1013 13:20:37.002554 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5" event={"ID":"a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8","Type":"ContainerStarted","Data":"205b89420682148ce101933b708975c0e3e125027ae9b2b5edcf06fa63958653"}
Oct 13 13:20:37 crc kubenswrapper[4684]: E1013 13:20:37.003788 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:4b4a17fe08ce00e375afaaec6a28835f5c1784f03d11c4558376ac04130f3a9e\\\"\"" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph" podUID="77101b92-a0b5-4715-b426-918edba4833c"
Oct 13 13:20:37 crc kubenswrapper[4684]: I1013 13:20:37.007289 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz" podStartSLOduration=26.007267307 podStartE2EDuration="26.007267307s" podCreationTimestamp="2025-10-13 13:20:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:20:36.961879349 +0000 UTC m=+791.529263419" watchObservedRunningTime="2025-10-13 13:20:37.007267307 +0000 UTC m=+791.574651377"
Oct 13 13:20:37 crc kubenswrapper[4684]: I1013 13:20:37.013631 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-ggcts" podStartSLOduration=18.824054367 podStartE2EDuration="27.013608818s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:12.331739408 +0000 UTC m=+766.899123478" lastFinishedPulling="2025-10-13 13:20:20.521293859 +0000 UTC m=+775.088677929" observedRunningTime="2025-10-13 13:20:37.006462132 +0000 UTC m=+791.573846202" watchObservedRunningTime="2025-10-13 13:20:37.013608818 +0000 UTC m=+791.580992888"
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.012764 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5458f77c4-9cknq" event={"ID":"34fad128-5332-4209-ba15-19cc84ccedb6","Type":"ContainerStarted","Data":"82ebb307cffa6bcbc336bb162646fe458577b50f057331da0f1cd8532d3b35e4"}
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.013160 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5458f77c4-9cknq"
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.013179 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5458f77c4-9cknq" event={"ID":"34fad128-5332-4209-ba15-19cc84ccedb6","Type":"ContainerStarted","Data":"4e880d3f11ef11845a53a30b49f467f3d2df67ccbf6a0d14c2fc733327a9faab"}
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.015158 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-hhc52" event={"ID":"f9b4d8c9-c763-42ce-bb43-3661fd211396","Type":"ContainerStarted","Data":"d0209ff8bf443f52bc81cff0ec36886497adae5841f1c3d2bb0d4e5328de555a"}
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.015307 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-hhc52"
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.017562 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-cdp57" event={"ID":"de604670-9b37-401b-a41e-de24f939ddfa","Type":"ContainerStarted","Data":"7a60f711024e97a8ede878c77802969624f6469fb823f6f4a77c75adc047535d"}
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.018128 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-cdp57"
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.021144 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-cmd4z" event={"ID":"210a7413-8bab-4428-adc6-be6bb0037a24","Type":"ContainerStarted","Data":"65d80e388e849041f97ea0c97d0b2bd3b0ec32f642e2d1fa3582382fb62d43ff"}
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.021704 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-cmd4z"
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.026584 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-flhrp" event={"ID":"67029ab5-dc79-4300-acc7-2e4ab2115809","Type":"ContainerStarted","Data":"638e93f11fa2dd087e62d43002e6aa9018dca62e8b32d9230814282e282d7b18"}
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.027172 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-flhrp"
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.029047 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-jjkmq" event={"ID":"8fb7300d-97a2-4116-bc41-da6d686a12a1","Type":"ContainerStarted","Data":"79e94bcc38a59210bdc95ed13cdd0c6aba56763cd53d0bcf6361451ccacdafe4"}
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.029420 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-jjkmq"
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.031951 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-fpcxc" event={"ID":"a89d3329-f603-48e0-a781-726e723ddba2","Type":"ContainerStarted","Data":"661efc7b790f0788d54fb25f156bf291af9a56629d5087fefd4727151b09efd0"}
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.032383 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-fpcxc"
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.034188 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-l2r6f" event={"ID":"d99f99c5-94dd-4239-a410-b4983e91974f","Type":"ContainerStarted","Data":"1859a98164a9d041c7b57fe190abbcaa325ec2df275b97d3493d228dbc1ebac6"}
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.034206 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-l2r6f" event={"ID":"d99f99c5-94dd-4239-a410-b4983e91974f","Type":"ContainerStarted","Data":"f4e02bb0aa665662970c8e58181cdef234e8d1a82a5f933e75801b84ca06c772"}
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.034538 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-l2r6f"
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.036198 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5458f77c4-9cknq" podStartSLOduration=6.28326671 podStartE2EDuration="27.03615945s" podCreationTimestamp="2025-10-13 13:20:11 +0000 UTC" firstStartedPulling="2025-10-13 13:20:14.284741115 +0000 UTC m=+768.852125185" lastFinishedPulling="2025-10-13 13:20:35.037633855 +0000 UTC m=+789.605017925" observedRunningTime="2025-10-13 13:20:38.034857108 +0000 UTC m=+792.602241178" watchObservedRunningTime="2025-10-13 13:20:38.03615945 +0000 UTC m=+792.603543520"
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.036495 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-nw27r" event={"ID":"c4cff4cc-1db4-4c49-821e-c24204a45224","Type":"ContainerStarted","Data":"c403c28799dd635c461b3aa2b03b09a0de62046caaf72414ebb3df5bcd3d2174"}
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.036515 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-nw27r" event={"ID":"c4cff4cc-1db4-4c49-821e-c24204a45224","Type":"ContainerStarted","Data":"90303b7a7cb807c726f40d16f10dab69b459037c31b02fc17944ca128294794e"}
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.036918 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-nw27r"
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.038721 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-6mg9x" event={"ID":"b3b72bfd-179d-4a2c-bbcf-eb318658886d","Type":"ContainerStarted","Data":"15cf9785b5483d3676c967d071722e4d2a0d821712774de9f84ca980765aef9e"}
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.039122 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-6mg9x"
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.043258 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-l4m6r" event={"ID":"1ea7dea4-73ae-4444-a1e8-9704cb1f32e5","Type":"ContainerStarted","Data":"825d5c2e211d198ab2f104f70f626cd3e46a793f47e3a0ada3acf4cb5e982b55"}
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.043829 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-l4m6r"
Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.046499 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-rjcff" event={"ID":"fc06efe2-69bb-4b8c-bade-ac2f86aedf8f","Type":"ContainerStarted","Data":"416a884fc978177962132ae0c205760652e2d60e28d8f987ddf5343221632c3a"} Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.046579 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-rjcff" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.048408 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-k9gct" event={"ID":"f5eb77ba-26c3-431f-88aa-43c9ca4e1137","Type":"ContainerStarted","Data":"99d182f9dabfcb0a154b91f5abe403e8c98fdc57f0da7956687e473926b89bfe"} Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.048527 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-k9gct" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.049693 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2bq8n" event={"ID":"495f3276-fe48-4709-9e4e-2e57fbf3a5a6","Type":"ContainerStarted","Data":"ab395adce061db3296adbd47c8e7c59176c91f2e0917f219b433561f8b8eb55e"} Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.052398 4684 generic.go:334] "Generic (PLEG): container finished" podID="4d395242-0cc1-4f47-958d-ee109434b8d2" containerID="9639f7982ac351a23469323aecdf0735c714f3b4ca436a8ecb008ab90d2a2bee" exitCode=0 Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.052511 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nvrns" event={"ID":"4d395242-0cc1-4f47-958d-ee109434b8d2","Type":"ContainerDied","Data":"9639f7982ac351a23469323aecdf0735c714f3b4ca436a8ecb008ab90d2a2bee"} Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.054139 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-fb6dz" event={"ID":"5db0ac5c-8b11-488c-8be7-14b040ddee3b","Type":"ContainerStarted","Data":"f1499fbca34835aabff574307fb7214a1542f3678faff062ee8770602620919a"} Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.054261 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-fb6dz" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.059314 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-rn562" event={"ID":"be62f268-d474-44c4-847d-3f43f4e3b724","Type":"ContainerStarted","Data":"32d1c8dc4e6f28fcbb79611dd197a0dde894b8f85108088aa456eac55f5e8f7c"} Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.059793 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-rn562" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.061157 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5df598886f-nrqjb" event={"ID":"d6bea786-6aa7-4ad2-ae0f-5b61f4bde746","Type":"ContainerStarted","Data":"aa1415030da61dee908d5f9e626212179fe699954a49ec9c4a72442725c25cab"} Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 
13:20:38.061312 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-5df598886f-nrqjb" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.071870 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5" event={"ID":"a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8","Type":"ContainerStarted","Data":"cefbfd194f400bdf200c7bbc9e9fb686ff1edaf918ad2e112cb9c5f4343b26fd"} Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.072050 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.075027 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-wmg4z" event={"ID":"738e9416-e337-4ae6-89e3-40ce9e1843b6","Type":"ContainerStarted","Data":"f0a858ffe780fa81185b2a6a69a7f0595ba4e9e9242c4d2adedd7fcd9873ef3b"} Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.075060 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-wmg4z" Oct 13 13:20:38 crc kubenswrapper[4684]: E1013 13:20:38.076323 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:98a5233f0596591acdf2c6a5838b08be108787cdb6ad1995b2b7886bac0fe6ca\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n" podUID="ba30b561-090d-4067-98e4-215b0c717ce2" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.092268 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-jjkmq" podStartSLOduration=6.402365831 podStartE2EDuration="27.092248148s" podCreationTimestamp="2025-10-13 13:20:11 +0000 UTC" firstStartedPulling="2025-10-13 13:20:14.302495644 +0000 UTC m=+768.869879714" lastFinishedPulling="2025-10-13 13:20:34.992377951 +0000 UTC m=+789.559762031" observedRunningTime="2025-10-13 13:20:38.071213631 +0000 UTC m=+792.638597721" watchObservedRunningTime="2025-10-13 13:20:38.092248148 +0000 UTC m=+792.659632218" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.094266 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-cdp57" podStartSLOduration=13.561939707 podStartE2EDuration="28.094260301s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:12.355480855 +0000 UTC m=+766.922864925" lastFinishedPulling="2025-10-13 13:20:26.887801459 +0000 UTC m=+781.455185519" observedRunningTime="2025-10-13 13:20:38.089894754 +0000 UTC m=+792.657278824" watchObservedRunningTime="2025-10-13 13:20:38.094260301 +0000 UTC m=+792.661644371" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.118764 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-flhrp" podStartSLOduration=19.461915211 podStartE2EDuration="28.118746087s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:11.866526958 +0000 UTC m=+766.433911028" lastFinishedPulling="2025-10-13 
13:20:20.523357834 +0000 UTC m=+775.090741904" observedRunningTime="2025-10-13 13:20:38.118123958 +0000 UTC m=+792.685508028" watchObservedRunningTime="2025-10-13 13:20:38.118746087 +0000 UTC m=+792.686130157" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.165354 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-fpcxc" podStartSLOduration=6.14170136 podStartE2EDuration="28.165336365s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:12.944622412 +0000 UTC m=+767.512006482" lastFinishedPulling="2025-10-13 13:20:34.968257407 +0000 UTC m=+789.535641487" observedRunningTime="2025-10-13 13:20:38.141690585 +0000 UTC m=+792.709074655" watchObservedRunningTime="2025-10-13 13:20:38.165336365 +0000 UTC m=+792.732720435" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.166893 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-l2r6f" podStartSLOduration=6.126554938 podStartE2EDuration="28.166885213s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:12.950495507 +0000 UTC m=+767.517879577" lastFinishedPulling="2025-10-13 13:20:34.990825772 +0000 UTC m=+789.558209852" observedRunningTime="2025-10-13 13:20:38.163372423 +0000 UTC m=+792.730756503" watchObservedRunningTime="2025-10-13 13:20:38.166885213 +0000 UTC m=+792.734269283" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.200982 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-cmd4z" podStartSLOduration=6.158626791 podStartE2EDuration="28.200961664s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:12.964835967 +0000 UTC m=+767.532220037" lastFinishedPulling="2025-10-13 13:20:35.00717084 +0000 UTC m=+789.574554910" observedRunningTime="2025-10-13 13:20:38.194165819 +0000 UTC m=+792.761549909" watchObservedRunningTime="2025-10-13 13:20:38.200961664 +0000 UTC m=+792.768345734" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.229643 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-hhc52" podStartSLOduration=7.020518963 podStartE2EDuration="28.229620812s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:12.950758484 +0000 UTC m=+767.518142554" lastFinishedPulling="2025-10-13 13:20:34.159860333 +0000 UTC m=+788.727244403" observedRunningTime="2025-10-13 13:20:38.22325551 +0000 UTC m=+792.790639590" watchObservedRunningTime="2025-10-13 13:20:38.229620812 +0000 UTC m=+792.797004882" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.248216 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-rjcff" podStartSLOduration=6.207271386 podStartE2EDuration="28.248199281s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:12.951350103 +0000 UTC m=+767.518734173" lastFinishedPulling="2025-10-13 13:20:34.992277998 +0000 UTC m=+789.559662068" observedRunningTime="2025-10-13 13:20:38.244257006 +0000 UTC m=+792.811641096" watchObservedRunningTime="2025-10-13 13:20:38.248199281 +0000 UTC m=+792.815583351" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.268229 4684 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-5df598886f-nrqjb" podStartSLOduration=6.242625528 podStartE2EDuration="28.268212535s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:12.942638219 +0000 UTC m=+767.510022289" lastFinishedPulling="2025-10-13 13:20:34.968225216 +0000 UTC m=+789.535609296" observedRunningTime="2025-10-13 13:20:38.267537574 +0000 UTC m=+792.834921644" watchObservedRunningTime="2025-10-13 13:20:38.268212535 +0000 UTC m=+792.835596595" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.315245 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-rn562" podStartSLOduration=6.296749232 podStartE2EDuration="28.315231535s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:12.950805146 +0000 UTC m=+767.518189216" lastFinishedPulling="2025-10-13 13:20:34.969287439 +0000 UTC m=+789.536671519" observedRunningTime="2025-10-13 13:20:38.313634125 +0000 UTC m=+792.881018195" watchObservedRunningTime="2025-10-13 13:20:38.315231535 +0000 UTC m=+792.882615605" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.315932 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-wmg4z" podStartSLOduration=6.293408022 podStartE2EDuration="28.315926378s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:12.968510783 +0000 UTC m=+767.535894843" lastFinishedPulling="2025-10-13 13:20:34.991029119 +0000 UTC m=+789.558413199" observedRunningTime="2025-10-13 13:20:38.293956372 +0000 UTC m=+792.861340432" watchObservedRunningTime="2025-10-13 13:20:38.315926378 +0000 UTC m=+792.883310448" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.337888 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-2bq8n" podStartSLOduration=6.671773439 podStartE2EDuration="27.337874823s" podCreationTimestamp="2025-10-13 13:20:11 +0000 UTC" firstStartedPulling="2025-10-13 13:20:14.302104981 +0000 UTC m=+768.869489051" lastFinishedPulling="2025-10-13 13:20:34.968206365 +0000 UTC m=+789.535590435" observedRunningTime="2025-10-13 13:20:38.332880045 +0000 UTC m=+792.900264115" watchObservedRunningTime="2025-10-13 13:20:38.337874823 +0000 UTC m=+792.905258893" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.356619 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-l4m6r" podStartSLOduration=8.599440675 podStartE2EDuration="28.356601638s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:15.221272826 +0000 UTC m=+769.788656896" lastFinishedPulling="2025-10-13 13:20:34.978433779 +0000 UTC m=+789.545817859" observedRunningTime="2025-10-13 13:20:38.354722767 +0000 UTC m=+792.922106847" watchObservedRunningTime="2025-10-13 13:20:38.356601638 +0000 UTC m=+792.923985708" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.396545 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-nw27r" podStartSLOduration=6.372671908 podStartE2EDuration="28.396529052s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" 
firstStartedPulling="2025-10-13 13:20:12.952306153 +0000 UTC m=+767.519690223" lastFinishedPulling="2025-10-13 13:20:34.976163297 +0000 UTC m=+789.543547367" observedRunningTime="2025-10-13 13:20:38.391062319 +0000 UTC m=+792.958446389" watchObservedRunningTime="2025-10-13 13:20:38.396529052 +0000 UTC m=+792.963913122" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.434509 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-fb6dz" podStartSLOduration=19.538719027 podStartE2EDuration="28.434488116s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:11.627676217 +0000 UTC m=+766.195060287" lastFinishedPulling="2025-10-13 13:20:20.523445306 +0000 UTC m=+775.090829376" observedRunningTime="2025-10-13 13:20:38.431179291 +0000 UTC m=+792.998563381" watchObservedRunningTime="2025-10-13 13:20:38.434488116 +0000 UTC m=+793.001872196" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.443038 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2v95t"] Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.444652 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.457529 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2v95t"] Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.459616 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-k9gct" podStartSLOduration=14.460152664 podStartE2EDuration="28.459598302s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:12.332432559 +0000 UTC m=+766.899816639" lastFinishedPulling="2025-10-13 13:20:26.331878187 +0000 UTC m=+780.899262277" observedRunningTime="2025-10-13 13:20:38.452704563 +0000 UTC m=+793.020088633" watchObservedRunningTime="2025-10-13 13:20:38.459598302 +0000 UTC m=+793.026982372" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.478736 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7001d9c-bf79-45f6-ad84-c81dba4966d7-utilities\") pod \"certified-operators-2v95t\" (UID: \"d7001d9c-bf79-45f6-ad84-c81dba4966d7\") " pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.478879 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whl5m\" (UniqueName: \"kubernetes.io/projected/d7001d9c-bf79-45f6-ad84-c81dba4966d7-kube-api-access-whl5m\") pod \"certified-operators-2v95t\" (UID: \"d7001d9c-bf79-45f6-ad84-c81dba4966d7\") " pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.478964 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7001d9c-bf79-45f6-ad84-c81dba4966d7-catalog-content\") pod \"certified-operators-2v95t\" (UID: \"d7001d9c-bf79-45f6-ad84-c81dba4966d7\") " pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.543942 4684 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5" podStartSLOduration=8.75734126 podStartE2EDuration="28.543920985s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:15.220525853 +0000 UTC m=+769.787909923" lastFinishedPulling="2025-10-13 13:20:35.007105578 +0000 UTC m=+789.574489648" observedRunningTime="2025-10-13 13:20:38.541985013 +0000 UTC m=+793.109369103" watchObservedRunningTime="2025-10-13 13:20:38.543920985 +0000 UTC m=+793.111305055" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.568042 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-6mg9x" podStartSLOduration=6.5158673369999995 podStartE2EDuration="28.568023359s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:12.917616762 +0000 UTC m=+767.485000832" lastFinishedPulling="2025-10-13 13:20:34.969772784 +0000 UTC m=+789.537156854" observedRunningTime="2025-10-13 13:20:38.564840658 +0000 UTC m=+793.132224728" watchObservedRunningTime="2025-10-13 13:20:38.568023359 +0000 UTC m=+793.135407429" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.579968 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whl5m\" (UniqueName: \"kubernetes.io/projected/d7001d9c-bf79-45f6-ad84-c81dba4966d7-kube-api-access-whl5m\") pod \"certified-operators-2v95t\" (UID: \"d7001d9c-bf79-45f6-ad84-c81dba4966d7\") " pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.580065 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7001d9c-bf79-45f6-ad84-c81dba4966d7-catalog-content\") pod \"certified-operators-2v95t\" (UID: \"d7001d9c-bf79-45f6-ad84-c81dba4966d7\") " pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.580114 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7001d9c-bf79-45f6-ad84-c81dba4966d7-utilities\") pod \"certified-operators-2v95t\" (UID: \"d7001d9c-bf79-45f6-ad84-c81dba4966d7\") " pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.580768 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7001d9c-bf79-45f6-ad84-c81dba4966d7-catalog-content\") pod \"certified-operators-2v95t\" (UID: \"d7001d9c-bf79-45f6-ad84-c81dba4966d7\") " pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.580819 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7001d9c-bf79-45f6-ad84-c81dba4966d7-utilities\") pod \"certified-operators-2v95t\" (UID: \"d7001d9c-bf79-45f6-ad84-c81dba4966d7\") " pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.632203 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whl5m\" (UniqueName: \"kubernetes.io/projected/d7001d9c-bf79-45f6-ad84-c81dba4966d7-kube-api-access-whl5m\") pod \"certified-operators-2v95t\" (UID: \"d7001d9c-bf79-45f6-ad84-c81dba4966d7\") " 
pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:38 crc kubenswrapper[4684]: I1013 13:20:38.762279 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:39 crc kubenswrapper[4684]: I1013 13:20:39.236591 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2v95t"] Oct 13 13:20:39 crc kubenswrapper[4684]: W1013 13:20:39.246307 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd7001d9c_bf79_45f6_ad84_c81dba4966d7.slice/crio-26765a6398b516637b32187a60cc0b80ccb6b6082724e76b4298dc17a24c3aa2 WatchSource:0}: Error finding container 26765a6398b516637b32187a60cc0b80ccb6b6082724e76b4298dc17a24c3aa2: Status 404 returned error can't find the container with id 26765a6398b516637b32187a60cc0b80ccb6b6082724e76b4298dc17a24c3aa2 Oct 13 13:20:40 crc kubenswrapper[4684]: I1013 13:20:40.097414 4684 generic.go:334] "Generic (PLEG): container finished" podID="4d395242-0cc1-4f47-958d-ee109434b8d2" containerID="9ab9b524ce24c19228238dc5e032cfacfcd1ef5afb72df3166be8378c62a2a2c" exitCode=0 Oct 13 13:20:40 crc kubenswrapper[4684]: I1013 13:20:40.097566 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nvrns" event={"ID":"4d395242-0cc1-4f47-958d-ee109434b8d2","Type":"ContainerDied","Data":"9ab9b524ce24c19228238dc5e032cfacfcd1ef5afb72df3166be8378c62a2a2c"} Oct 13 13:20:40 crc kubenswrapper[4684]: I1013 13:20:40.099793 4684 generic.go:334] "Generic (PLEG): container finished" podID="d7001d9c-bf79-45f6-ad84-c81dba4966d7" containerID="943f8b491543f26e1bd35f79f8a62a72b4482bcf09424ad6b016c0deda523dbe" exitCode=0 Oct 13 13:20:40 crc kubenswrapper[4684]: I1013 13:20:40.099924 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2v95t" event={"ID":"d7001d9c-bf79-45f6-ad84-c81dba4966d7","Type":"ContainerDied","Data":"943f8b491543f26e1bd35f79f8a62a72b4482bcf09424ad6b016c0deda523dbe"} Oct 13 13:20:40 crc kubenswrapper[4684]: I1013 13:20:40.099965 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2v95t" event={"ID":"d7001d9c-bf79-45f6-ad84-c81dba4966d7","Type":"ContainerStarted","Data":"26765a6398b516637b32187a60cc0b80ccb6b6082724e76b4298dc17a24c3aa2"} Oct 13 13:20:40 crc kubenswrapper[4684]: I1013 13:20:40.105080 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-flhrp" Oct 13 13:20:40 crc kubenswrapper[4684]: I1013 13:20:40.105142 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-cmd4z" Oct 13 13:20:40 crc kubenswrapper[4684]: I1013 13:20:40.109135 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-cdp57" Oct 13 13:20:40 crc kubenswrapper[4684]: I1013 13:20:40.110046 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-l4m6r" Oct 13 13:20:40 crc kubenswrapper[4684]: I1013 13:20:40.112180 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55b7d44848pz9s5" Oct 13 13:20:41 crc 
kubenswrapper[4684]: I1013 13:20:41.000888 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-fb6dz" Oct 13 13:20:41 crc kubenswrapper[4684]: I1013 13:20:41.080270 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-ggcts" Oct 13 13:20:41 crc kubenswrapper[4684]: I1013 13:20:41.105557 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-k9gct" Oct 13 13:20:41 crc kubenswrapper[4684]: I1013 13:20:41.135612 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nvrns" event={"ID":"4d395242-0cc1-4f47-958d-ee109434b8d2","Type":"ContainerStarted","Data":"10990e35a58c1a64c56fc097f78f3737b3bf28deb2489a66b39ddf0566513460"} Oct 13 13:20:41 crc kubenswrapper[4684]: I1013 13:20:41.173255 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nvrns" podStartSLOduration=5.622833236 podStartE2EDuration="8.173231756s" podCreationTimestamp="2025-10-13 13:20:33 +0000 UTC" firstStartedPulling="2025-10-13 13:20:38.053754538 +0000 UTC m=+792.621138608" lastFinishedPulling="2025-10-13 13:20:40.604153058 +0000 UTC m=+795.171537128" observedRunningTime="2025-10-13 13:20:41.164441308 +0000 UTC m=+795.731825378" watchObservedRunningTime="2025-10-13 13:20:41.173231756 +0000 UTC m=+795.740615826" Oct 13 13:20:41 crc kubenswrapper[4684]: I1013 13:20:41.262069 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-6mg9x" Oct 13 13:20:41 crc kubenswrapper[4684]: I1013 13:20:41.345542 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-rn562" Oct 13 13:20:41 crc kubenswrapper[4684]: I1013 13:20:41.401478 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-wmg4z" Oct 13 13:20:41 crc kubenswrapper[4684]: I1013 13:20:41.467442 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-fpcxc" Oct 13 13:20:41 crc kubenswrapper[4684]: I1013 13:20:41.470586 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-5df598886f-nrqjb" Oct 13 13:20:41 crc kubenswrapper[4684]: I1013 13:20:41.488725 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-hhc52" Oct 13 13:20:41 crc kubenswrapper[4684]: I1013 13:20:41.509570 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-79df5fb58c-rjcff" Oct 13 13:20:41 crc kubenswrapper[4684]: I1013 13:20:41.680986 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-jjkmq" Oct 13 13:20:42 crc kubenswrapper[4684]: I1013 13:20:42.143447 4684 generic.go:334] "Generic (PLEG): container finished" podID="d7001d9c-bf79-45f6-ad84-c81dba4966d7" containerID="892110737a3a99e5584f5bd6feeb293922a45c30d2ee4d7743a5a900a188ecb3" exitCode=0 Oct 
13 13:20:42 crc kubenswrapper[4684]: I1013 13:20:42.143506 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2v95t" event={"ID":"d7001d9c-bf79-45f6-ad84-c81dba4966d7","Type":"ContainerDied","Data":"892110737a3a99e5584f5bd6feeb293922a45c30d2ee4d7743a5a900a188ecb3"} Oct 13 13:20:42 crc kubenswrapper[4684]: I1013 13:20:42.442852 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7fb8c88b76-kqqbz" Oct 13 13:20:43 crc kubenswrapper[4684]: I1013 13:20:43.152844 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2v95t" event={"ID":"d7001d9c-bf79-45f6-ad84-c81dba4966d7","Type":"ContainerStarted","Data":"0c10545b2be3f8e6e2b143dbd41dd26f736e6e6c1b9390a9fc8a588455ccb33a"} Oct 13 13:20:43 crc kubenswrapper[4684]: I1013 13:20:43.336994 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 13:20:43 crc kubenswrapper[4684]: I1013 13:20:43.337045 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 13:20:44 crc kubenswrapper[4684]: I1013 13:20:44.374596 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nvrns" podUID="4d395242-0cc1-4f47-958d-ee109434b8d2" containerName="registry-server" probeResult="failure" output=< Oct 13 13:20:44 crc kubenswrapper[4684]: timeout: failed to connect service ":50051" within 1s Oct 13 13:20:44 crc kubenswrapper[4684]: > Oct 13 13:20:48 crc kubenswrapper[4684]: I1013 13:20:48.762925 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:48 crc kubenswrapper[4684]: I1013 13:20:48.763334 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:48 crc kubenswrapper[4684]: I1013 13:20:48.835331 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:48 crc kubenswrapper[4684]: I1013 13:20:48.855043 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2v95t" podStartSLOduration=8.395993864 podStartE2EDuration="10.855014887s" podCreationTimestamp="2025-10-13 13:20:38 +0000 UTC" firstStartedPulling="2025-10-13 13:20:40.105492502 +0000 UTC m=+794.672876612" lastFinishedPulling="2025-10-13 13:20:42.564513565 +0000 UTC m=+797.131897635" observedRunningTime="2025-10-13 13:20:43.179692746 +0000 UTC m=+797.747076826" watchObservedRunningTime="2025-10-13 13:20:48.855014887 +0000 UTC m=+803.422398977" Oct 13 13:20:49 crc kubenswrapper[4684]: I1013 13:20:49.242385 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:49 crc kubenswrapper[4684]: I1013 13:20:49.284277 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2v95t"] Oct 13 13:20:50 crc kubenswrapper[4684]: I1013 13:20:50.204929 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph" 
event={"ID":"77101b92-a0b5-4715-b426-918edba4833c","Type":"ContainerStarted","Data":"cef4ae951ed1e27f8c37ffed7f12640f6ba503dec6e60a3021acc81f80b2350e"} Oct 13 13:20:50 crc kubenswrapper[4684]: I1013 13:20:50.205220 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph" Oct 13 13:20:50 crc kubenswrapper[4684]: I1013 13:20:50.221057 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph" podStartSLOduration=4.894071857 podStartE2EDuration="40.221041377s" podCreationTimestamp="2025-10-13 13:20:10 +0000 UTC" firstStartedPulling="2025-10-13 13:20:14.294917455 +0000 UTC m=+768.862301525" lastFinishedPulling="2025-10-13 13:20:49.621886975 +0000 UTC m=+804.189271045" observedRunningTime="2025-10-13 13:20:50.218329791 +0000 UTC m=+804.785713911" watchObservedRunningTime="2025-10-13 13:20:50.221041377 +0000 UTC m=+804.788425447" Oct 13 13:20:51 crc kubenswrapper[4684]: I1013 13:20:51.203714 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-nw27r" Oct 13 13:20:51 crc kubenswrapper[4684]: I1013 13:20:51.211089 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2v95t" podUID="d7001d9c-bf79-45f6-ad84-c81dba4966d7" containerName="registry-server" containerID="cri-o://0c10545b2be3f8e6e2b143dbd41dd26f736e6e6c1b9390a9fc8a588455ccb33a" gracePeriod=2 Oct 13 13:20:51 crc kubenswrapper[4684]: I1013 13:20:51.394609 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-l2r6f" Oct 13 13:20:51 crc kubenswrapper[4684]: I1013 13:20:51.712892 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5458f77c4-9cknq" Oct 13 13:20:51 crc kubenswrapper[4684]: I1013 13:20:51.789500 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:51 crc kubenswrapper[4684]: I1013 13:20:51.966084 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7001d9c-bf79-45f6-ad84-c81dba4966d7-utilities\") pod \"d7001d9c-bf79-45f6-ad84-c81dba4966d7\" (UID: \"d7001d9c-bf79-45f6-ad84-c81dba4966d7\") " Oct 13 13:20:51 crc kubenswrapper[4684]: I1013 13:20:51.966197 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7001d9c-bf79-45f6-ad84-c81dba4966d7-catalog-content\") pod \"d7001d9c-bf79-45f6-ad84-c81dba4966d7\" (UID: \"d7001d9c-bf79-45f6-ad84-c81dba4966d7\") " Oct 13 13:20:51 crc kubenswrapper[4684]: I1013 13:20:51.966295 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whl5m\" (UniqueName: \"kubernetes.io/projected/d7001d9c-bf79-45f6-ad84-c81dba4966d7-kube-api-access-whl5m\") pod \"d7001d9c-bf79-45f6-ad84-c81dba4966d7\" (UID: \"d7001d9c-bf79-45f6-ad84-c81dba4966d7\") " Oct 13 13:20:51 crc kubenswrapper[4684]: I1013 13:20:51.967259 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7001d9c-bf79-45f6-ad84-c81dba4966d7-utilities" (OuterVolumeSpecName: "utilities") pod "d7001d9c-bf79-45f6-ad84-c81dba4966d7" (UID: "d7001d9c-bf79-45f6-ad84-c81dba4966d7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:20:51 crc kubenswrapper[4684]: I1013 13:20:51.971456 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7001d9c-bf79-45f6-ad84-c81dba4966d7-kube-api-access-whl5m" (OuterVolumeSpecName: "kube-api-access-whl5m") pod "d7001d9c-bf79-45f6-ad84-c81dba4966d7" (UID: "d7001d9c-bf79-45f6-ad84-c81dba4966d7"). InnerVolumeSpecName "kube-api-access-whl5m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.068310 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7001d9c-bf79-45f6-ad84-c81dba4966d7-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.068344 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whl5m\" (UniqueName: \"kubernetes.io/projected/d7001d9c-bf79-45f6-ad84-c81dba4966d7-kube-api-access-whl5m\") on node \"crc\" DevicePath \"\"" Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.133139 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7001d9c-bf79-45f6-ad84-c81dba4966d7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d7001d9c-bf79-45f6-ad84-c81dba4966d7" (UID: "d7001d9c-bf79-45f6-ad84-c81dba4966d7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.169702 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7001d9c-bf79-45f6-ad84-c81dba4966d7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.220029 4684 generic.go:334] "Generic (PLEG): container finished" podID="d7001d9c-bf79-45f6-ad84-c81dba4966d7" containerID="0c10545b2be3f8e6e2b143dbd41dd26f736e6e6c1b9390a9fc8a588455ccb33a" exitCode=0 Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.220076 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2v95t" Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.220073 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2v95t" event={"ID":"d7001d9c-bf79-45f6-ad84-c81dba4966d7","Type":"ContainerDied","Data":"0c10545b2be3f8e6e2b143dbd41dd26f736e6e6c1b9390a9fc8a588455ccb33a"} Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.220193 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2v95t" event={"ID":"d7001d9c-bf79-45f6-ad84-c81dba4966d7","Type":"ContainerDied","Data":"26765a6398b516637b32187a60cc0b80ccb6b6082724e76b4298dc17a24c3aa2"} Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.220214 4684 scope.go:117] "RemoveContainer" containerID="0c10545b2be3f8e6e2b143dbd41dd26f736e6e6c1b9390a9fc8a588455ccb33a" Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.236258 4684 scope.go:117] "RemoveContainer" containerID="892110737a3a99e5584f5bd6feeb293922a45c30d2ee4d7743a5a900a188ecb3" Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.247765 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2v95t"] Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.259233 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2v95t"] Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.274467 4684 scope.go:117] "RemoveContainer" containerID="943f8b491543f26e1bd35f79f8a62a72b4482bcf09424ad6b016c0deda523dbe" Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.299119 4684 scope.go:117] "RemoveContainer" containerID="0c10545b2be3f8e6e2b143dbd41dd26f736e6e6c1b9390a9fc8a588455ccb33a" Oct 13 13:20:52 crc kubenswrapper[4684]: E1013 13:20:52.299560 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c10545b2be3f8e6e2b143dbd41dd26f736e6e6c1b9390a9fc8a588455ccb33a\": container with ID starting with 0c10545b2be3f8e6e2b143dbd41dd26f736e6e6c1b9390a9fc8a588455ccb33a not found: ID does not exist" containerID="0c10545b2be3f8e6e2b143dbd41dd26f736e6e6c1b9390a9fc8a588455ccb33a" Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.299627 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c10545b2be3f8e6e2b143dbd41dd26f736e6e6c1b9390a9fc8a588455ccb33a"} err="failed to get container status \"0c10545b2be3f8e6e2b143dbd41dd26f736e6e6c1b9390a9fc8a588455ccb33a\": rpc error: code = NotFound desc = could not find container \"0c10545b2be3f8e6e2b143dbd41dd26f736e6e6c1b9390a9fc8a588455ccb33a\": container with ID starting with 0c10545b2be3f8e6e2b143dbd41dd26f736e6e6c1b9390a9fc8a588455ccb33a not found: ID does not exist" Oct 13 
Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.299653 4684 scope.go:117] "RemoveContainer" containerID="892110737a3a99e5584f5bd6feeb293922a45c30d2ee4d7743a5a900a188ecb3"
Oct 13 13:20:52 crc kubenswrapper[4684]: E1013 13:20:52.300065 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"892110737a3a99e5584f5bd6feeb293922a45c30d2ee4d7743a5a900a188ecb3\": container with ID starting with 892110737a3a99e5584f5bd6feeb293922a45c30d2ee4d7743a5a900a188ecb3 not found: ID does not exist" containerID="892110737a3a99e5584f5bd6feeb293922a45c30d2ee4d7743a5a900a188ecb3"
Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.300093 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"892110737a3a99e5584f5bd6feeb293922a45c30d2ee4d7743a5a900a188ecb3"} err="failed to get container status \"892110737a3a99e5584f5bd6feeb293922a45c30d2ee4d7743a5a900a188ecb3\": rpc error: code = NotFound desc = could not find container \"892110737a3a99e5584f5bd6feeb293922a45c30d2ee4d7743a5a900a188ecb3\": container with ID starting with 892110737a3a99e5584f5bd6feeb293922a45c30d2ee4d7743a5a900a188ecb3 not found: ID does not exist"
Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.300107 4684 scope.go:117] "RemoveContainer" containerID="943f8b491543f26e1bd35f79f8a62a72b4482bcf09424ad6b016c0deda523dbe"
Oct 13 13:20:52 crc kubenswrapper[4684]: E1013 13:20:52.300502 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"943f8b491543f26e1bd35f79f8a62a72b4482bcf09424ad6b016c0deda523dbe\": container with ID starting with 943f8b491543f26e1bd35f79f8a62a72b4482bcf09424ad6b016c0deda523dbe not found: ID does not exist" containerID="943f8b491543f26e1bd35f79f8a62a72b4482bcf09424ad6b016c0deda523dbe"
Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.300570 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"943f8b491543f26e1bd35f79f8a62a72b4482bcf09424ad6b016c0deda523dbe"} err="failed to get container status \"943f8b491543f26e1bd35f79f8a62a72b4482bcf09424ad6b016c0deda523dbe\": rpc error: code = NotFound desc = could not find container \"943f8b491543f26e1bd35f79f8a62a72b4482bcf09424ad6b016c0deda523dbe\": container with ID starting with 943f8b491543f26e1bd35f79f8a62a72b4482bcf09424ad6b016c0deda523dbe not found: ID does not exist"
Oct 13 13:20:52 crc kubenswrapper[4684]: I1013 13:20:52.362425 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7001d9c-bf79-45f6-ad84-c81dba4966d7" path="/var/lib/kubelet/pods/d7001d9c-bf79-45f6-ad84-c81dba4966d7/volumes"
Oct 13 13:20:53 crc kubenswrapper[4684]: I1013 13:20:53.245647 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n" event={"ID":"ba30b561-090d-4067-98e4-215b0c717ce2","Type":"ContainerStarted","Data":"abcfc5fec896106d59dbd16b0dceffbac9a0bd30be5b878cf13f75032d9040ba"}
Oct 13 13:20:53 crc kubenswrapper[4684]: I1013 13:20:53.246197 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n"
Oct 13 13:20:53 crc kubenswrapper[4684]: I1013 13:20:53.267189 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n" podStartSLOduration=3.765768092 podStartE2EDuration="42.267159939s" podCreationTimestamp="2025-10-13 13:20:11 +0000 UTC" firstStartedPulling="2025-10-13 13:20:14.288193974 +0000 UTC m=+768.855578084" lastFinishedPulling="2025-10-13 13:20:52.789585841 +0000 UTC m=+807.356969931" observedRunningTime="2025-10-13 13:20:53.26684342 +0000 UTC m=+807.834227490" watchObservedRunningTime="2025-10-13 13:20:53.267159939 +0000 UTC m=+807.834544029"
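[editor's note] The pod_startup_latency_tracker entry just above carries all the inputs for its own durations: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is the E2E figure with the image-pull window excluded. A small Go sketch reproducing that arithmetic from the logged timestamps (our reconstruction; the tracker itself works from monotonic clock readings, so the SLO figure matches only to sub-microsecond rounding):

package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST" // Go time.String() format used in the log
	parse := func(s string) time.Time { t, _ := time.Parse(layout, s); return t }

	created := parse("2025-10-13 13:20:11 +0000 UTC")
	firstPull := parse("2025-10-13 13:20:14.288193974 +0000 UTC")
	lastPull := parse("2025-10-13 13:20:52.789585841 +0000 UTC")
	observed := parse("2025-10-13 13:20:53.267159939 +0000 UTC")

	e2e := observed.Sub(created)         // 42.267159939s, the logged podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // ~3.765768s, the logged podStartSLOduration
	fmt.Println(e2e, slo)
}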
podStartE2EDuration="42.267159939s" podCreationTimestamp="2025-10-13 13:20:11 +0000 UTC" firstStartedPulling="2025-10-13 13:20:14.288193974 +0000 UTC m=+768.855578084" lastFinishedPulling="2025-10-13 13:20:52.789585841 +0000 UTC m=+807.356969931" observedRunningTime="2025-10-13 13:20:53.26684342 +0000 UTC m=+807.834227490" watchObservedRunningTime="2025-10-13 13:20:53.267159939 +0000 UTC m=+807.834544029" Oct 13 13:20:53 crc kubenswrapper[4684]: I1013 13:20:53.385413 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 13:20:53 crc kubenswrapper[4684]: I1013 13:20:53.431313 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 13:20:54 crc kubenswrapper[4684]: I1013 13:20:54.468057 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nvrns"] Oct 13 13:20:55 crc kubenswrapper[4684]: I1013 13:20:55.258080 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nvrns" podUID="4d395242-0cc1-4f47-958d-ee109434b8d2" containerName="registry-server" containerID="cri-o://10990e35a58c1a64c56fc097f78f3737b3bf28deb2489a66b39ddf0566513460" gracePeriod=2 Oct 13 13:20:55 crc kubenswrapper[4684]: I1013 13:20:55.626309 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 13:20:55 crc kubenswrapper[4684]: I1013 13:20:55.816499 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d395242-0cc1-4f47-958d-ee109434b8d2-catalog-content\") pod \"4d395242-0cc1-4f47-958d-ee109434b8d2\" (UID: \"4d395242-0cc1-4f47-958d-ee109434b8d2\") " Oct 13 13:20:55 crc kubenswrapper[4684]: I1013 13:20:55.816665 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sp6rq\" (UniqueName: \"kubernetes.io/projected/4d395242-0cc1-4f47-958d-ee109434b8d2-kube-api-access-sp6rq\") pod \"4d395242-0cc1-4f47-958d-ee109434b8d2\" (UID: \"4d395242-0cc1-4f47-958d-ee109434b8d2\") " Oct 13 13:20:55 crc kubenswrapper[4684]: I1013 13:20:55.816890 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d395242-0cc1-4f47-958d-ee109434b8d2-utilities\") pod \"4d395242-0cc1-4f47-958d-ee109434b8d2\" (UID: \"4d395242-0cc1-4f47-958d-ee109434b8d2\") " Oct 13 13:20:55 crc kubenswrapper[4684]: I1013 13:20:55.817883 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d395242-0cc1-4f47-958d-ee109434b8d2-utilities" (OuterVolumeSpecName: "utilities") pod "4d395242-0cc1-4f47-958d-ee109434b8d2" (UID: "4d395242-0cc1-4f47-958d-ee109434b8d2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:20:55 crc kubenswrapper[4684]: I1013 13:20:55.822864 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d395242-0cc1-4f47-958d-ee109434b8d2-kube-api-access-sp6rq" (OuterVolumeSpecName: "kube-api-access-sp6rq") pod "4d395242-0cc1-4f47-958d-ee109434b8d2" (UID: "4d395242-0cc1-4f47-958d-ee109434b8d2"). InnerVolumeSpecName "kube-api-access-sp6rq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:20:55 crc kubenswrapper[4684]: I1013 13:20:55.912531 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d395242-0cc1-4f47-958d-ee109434b8d2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d395242-0cc1-4f47-958d-ee109434b8d2" (UID: "4d395242-0cc1-4f47-958d-ee109434b8d2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:20:55 crc kubenswrapper[4684]: I1013 13:20:55.918472 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d395242-0cc1-4f47-958d-ee109434b8d2-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:20:55 crc kubenswrapper[4684]: I1013 13:20:55.918528 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d395242-0cc1-4f47-958d-ee109434b8d2-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:20:55 crc kubenswrapper[4684]: I1013 13:20:55.918546 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sp6rq\" (UniqueName: \"kubernetes.io/projected/4d395242-0cc1-4f47-958d-ee109434b8d2-kube-api-access-sp6rq\") on node \"crc\" DevicePath \"\"" Oct 13 13:20:56 crc kubenswrapper[4684]: I1013 13:20:56.267526 4684 generic.go:334] "Generic (PLEG): container finished" podID="4d395242-0cc1-4f47-958d-ee109434b8d2" containerID="10990e35a58c1a64c56fc097f78f3737b3bf28deb2489a66b39ddf0566513460" exitCode=0 Oct 13 13:20:56 crc kubenswrapper[4684]: I1013 13:20:56.267572 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nvrns" event={"ID":"4d395242-0cc1-4f47-958d-ee109434b8d2","Type":"ContainerDied","Data":"10990e35a58c1a64c56fc097f78f3737b3bf28deb2489a66b39ddf0566513460"} Oct 13 13:20:56 crc kubenswrapper[4684]: I1013 13:20:56.267602 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nvrns" Oct 13 13:20:56 crc kubenswrapper[4684]: I1013 13:20:56.267615 4684 scope.go:117] "RemoveContainer" containerID="10990e35a58c1a64c56fc097f78f3737b3bf28deb2489a66b39ddf0566513460" Oct 13 13:20:56 crc kubenswrapper[4684]: I1013 13:20:56.267604 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nvrns" event={"ID":"4d395242-0cc1-4f47-958d-ee109434b8d2","Type":"ContainerDied","Data":"ee95cea1b8d05268bf6fb6123ec5f79cf89028bb49160ba4ea829d286d3d7349"} Oct 13 13:20:56 crc kubenswrapper[4684]: I1013 13:20:56.289233 4684 scope.go:117] "RemoveContainer" containerID="9ab9b524ce24c19228238dc5e032cfacfcd1ef5afb72df3166be8378c62a2a2c" Oct 13 13:20:56 crc kubenswrapper[4684]: I1013 13:20:56.302141 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nvrns"] Oct 13 13:20:56 crc kubenswrapper[4684]: I1013 13:20:56.306590 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nvrns"] Oct 13 13:20:56 crc kubenswrapper[4684]: I1013 13:20:56.318113 4684 scope.go:117] "RemoveContainer" containerID="9639f7982ac351a23469323aecdf0735c714f3b4ca436a8ecb008ab90d2a2bee" Oct 13 13:20:56 crc kubenswrapper[4684]: I1013 13:20:56.337575 4684 scope.go:117] "RemoveContainer" containerID="10990e35a58c1a64c56fc097f78f3737b3bf28deb2489a66b39ddf0566513460" Oct 13 13:20:56 crc kubenswrapper[4684]: E1013 13:20:56.338347 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10990e35a58c1a64c56fc097f78f3737b3bf28deb2489a66b39ddf0566513460\": container with ID starting with 10990e35a58c1a64c56fc097f78f3737b3bf28deb2489a66b39ddf0566513460 not found: ID does not exist" containerID="10990e35a58c1a64c56fc097f78f3737b3bf28deb2489a66b39ddf0566513460" Oct 13 13:20:56 crc kubenswrapper[4684]: I1013 13:20:56.338390 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10990e35a58c1a64c56fc097f78f3737b3bf28deb2489a66b39ddf0566513460"} err="failed to get container status \"10990e35a58c1a64c56fc097f78f3737b3bf28deb2489a66b39ddf0566513460\": rpc error: code = NotFound desc = could not find container \"10990e35a58c1a64c56fc097f78f3737b3bf28deb2489a66b39ddf0566513460\": container with ID starting with 10990e35a58c1a64c56fc097f78f3737b3bf28deb2489a66b39ddf0566513460 not found: ID does not exist" Oct 13 13:20:56 crc kubenswrapper[4684]: I1013 13:20:56.338413 4684 scope.go:117] "RemoveContainer" containerID="9ab9b524ce24c19228238dc5e032cfacfcd1ef5afb72df3166be8378c62a2a2c" Oct 13 13:20:56 crc kubenswrapper[4684]: E1013 13:20:56.338688 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ab9b524ce24c19228238dc5e032cfacfcd1ef5afb72df3166be8378c62a2a2c\": container with ID starting with 9ab9b524ce24c19228238dc5e032cfacfcd1ef5afb72df3166be8378c62a2a2c not found: ID does not exist" containerID="9ab9b524ce24c19228238dc5e032cfacfcd1ef5afb72df3166be8378c62a2a2c" Oct 13 13:20:56 crc kubenswrapper[4684]: I1013 13:20:56.338710 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ab9b524ce24c19228238dc5e032cfacfcd1ef5afb72df3166be8378c62a2a2c"} err="failed to get container status \"9ab9b524ce24c19228238dc5e032cfacfcd1ef5afb72df3166be8378c62a2a2c\": rpc error: code = NotFound desc = could not find container 
\"9ab9b524ce24c19228238dc5e032cfacfcd1ef5afb72df3166be8378c62a2a2c\": container with ID starting with 9ab9b524ce24c19228238dc5e032cfacfcd1ef5afb72df3166be8378c62a2a2c not found: ID does not exist" Oct 13 13:20:56 crc kubenswrapper[4684]: I1013 13:20:56.338723 4684 scope.go:117] "RemoveContainer" containerID="9639f7982ac351a23469323aecdf0735c714f3b4ca436a8ecb008ab90d2a2bee" Oct 13 13:20:56 crc kubenswrapper[4684]: E1013 13:20:56.338955 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9639f7982ac351a23469323aecdf0735c714f3b4ca436a8ecb008ab90d2a2bee\": container with ID starting with 9639f7982ac351a23469323aecdf0735c714f3b4ca436a8ecb008ab90d2a2bee not found: ID does not exist" containerID="9639f7982ac351a23469323aecdf0735c714f3b4ca436a8ecb008ab90d2a2bee" Oct 13 13:20:56 crc kubenswrapper[4684]: I1013 13:20:56.338971 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9639f7982ac351a23469323aecdf0735c714f3b4ca436a8ecb008ab90d2a2bee"} err="failed to get container status \"9639f7982ac351a23469323aecdf0735c714f3b4ca436a8ecb008ab90d2a2bee\": rpc error: code = NotFound desc = could not find container \"9639f7982ac351a23469323aecdf0735c714f3b4ca436a8ecb008ab90d2a2bee\": container with ID starting with 9639f7982ac351a23469323aecdf0735c714f3b4ca436a8ecb008ab90d2a2bee not found: ID does not exist" Oct 13 13:20:56 crc kubenswrapper[4684]: I1013 13:20:56.363815 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d395242-0cc1-4f47-958d-ee109434b8d2" path="/var/lib/kubelet/pods/4d395242-0cc1-4f47-958d-ee109434b8d2/volumes" Oct 13 13:21:01 crc kubenswrapper[4684]: I1013 13:21:01.545632 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-zbvph" Oct 13 13:21:01 crc kubenswrapper[4684]: I1013 13:21:01.786482 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-52z6n" Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.480373 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8spk8"] Oct 13 13:21:06 crc kubenswrapper[4684]: E1013 13:21:06.481155 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d395242-0cc1-4f47-958d-ee109434b8d2" containerName="extract-utilities" Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.481168 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d395242-0cc1-4f47-958d-ee109434b8d2" containerName="extract-utilities" Oct 13 13:21:06 crc kubenswrapper[4684]: E1013 13:21:06.481189 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7001d9c-bf79-45f6-ad84-c81dba4966d7" containerName="extract-utilities" Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.481195 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7001d9c-bf79-45f6-ad84-c81dba4966d7" containerName="extract-utilities" Oct 13 13:21:06 crc kubenswrapper[4684]: E1013 13:21:06.481210 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7001d9c-bf79-45f6-ad84-c81dba4966d7" containerName="registry-server" Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.481216 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7001d9c-bf79-45f6-ad84-c81dba4966d7" containerName="registry-server" Oct 13 13:21:06 crc kubenswrapper[4684]: E1013 13:21:06.481241 4684 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d395242-0cc1-4f47-958d-ee109434b8d2" containerName="extract-content" Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.481246 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d395242-0cc1-4f47-958d-ee109434b8d2" containerName="extract-content" Oct 13 13:21:06 crc kubenswrapper[4684]: E1013 13:21:06.481261 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d395242-0cc1-4f47-958d-ee109434b8d2" containerName="registry-server" Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.481266 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d395242-0cc1-4f47-958d-ee109434b8d2" containerName="registry-server" Oct 13 13:21:06 crc kubenswrapper[4684]: E1013 13:21:06.481421 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7001d9c-bf79-45f6-ad84-c81dba4966d7" containerName="extract-content" Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.481434 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7001d9c-bf79-45f6-ad84-c81dba4966d7" containerName="extract-content" Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.481596 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7001d9c-bf79-45f6-ad84-c81dba4966d7" containerName="registry-server" Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.481620 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d395242-0cc1-4f47-958d-ee109434b8d2" containerName="registry-server" Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.482596 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8spk8" Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.489226 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8spk8"] Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.601468 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4262fc7d-7332-42ac-9a54-06e4dc3c4d46-catalog-content\") pod \"community-operators-8spk8\" (UID: \"4262fc7d-7332-42ac-9a54-06e4dc3c4d46\") " pod="openshift-marketplace/community-operators-8spk8" Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.601531 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4262fc7d-7332-42ac-9a54-06e4dc3c4d46-utilities\") pod \"community-operators-8spk8\" (UID: \"4262fc7d-7332-42ac-9a54-06e4dc3c4d46\") " pod="openshift-marketplace/community-operators-8spk8" Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.601781 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdnnf\" (UniqueName: \"kubernetes.io/projected/4262fc7d-7332-42ac-9a54-06e4dc3c4d46-kube-api-access-vdnnf\") pod \"community-operators-8spk8\" (UID: \"4262fc7d-7332-42ac-9a54-06e4dc3c4d46\") " pod="openshift-marketplace/community-operators-8spk8" Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.702891 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdnnf\" (UniqueName: \"kubernetes.io/projected/4262fc7d-7332-42ac-9a54-06e4dc3c4d46-kube-api-access-vdnnf\") pod \"community-operators-8spk8\" (UID: \"4262fc7d-7332-42ac-9a54-06e4dc3c4d46\") " pod="openshift-marketplace/community-operators-8spk8" 
Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.703012 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4262fc7d-7332-42ac-9a54-06e4dc3c4d46-catalog-content\") pod \"community-operators-8spk8\" (UID: \"4262fc7d-7332-42ac-9a54-06e4dc3c4d46\") " pod="openshift-marketplace/community-operators-8spk8"
Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.703040 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4262fc7d-7332-42ac-9a54-06e4dc3c4d46-utilities\") pod \"community-operators-8spk8\" (UID: \"4262fc7d-7332-42ac-9a54-06e4dc3c4d46\") " pod="openshift-marketplace/community-operators-8spk8"
Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.703606 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4262fc7d-7332-42ac-9a54-06e4dc3c4d46-utilities\") pod \"community-operators-8spk8\" (UID: \"4262fc7d-7332-42ac-9a54-06e4dc3c4d46\") " pod="openshift-marketplace/community-operators-8spk8"
Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.703640 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4262fc7d-7332-42ac-9a54-06e4dc3c4d46-catalog-content\") pod \"community-operators-8spk8\" (UID: \"4262fc7d-7332-42ac-9a54-06e4dc3c4d46\") " pod="openshift-marketplace/community-operators-8spk8"
Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.725634 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdnnf\" (UniqueName: \"kubernetes.io/projected/4262fc7d-7332-42ac-9a54-06e4dc3c4d46-kube-api-access-vdnnf\") pod \"community-operators-8spk8\" (UID: \"4262fc7d-7332-42ac-9a54-06e4dc3c4d46\") " pod="openshift-marketplace/community-operators-8spk8"
Oct 13 13:21:06 crc kubenswrapper[4684]: I1013 13:21:06.800412 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8spk8"
Oct 13 13:21:07 crc kubenswrapper[4684]: I1013 13:21:07.341716 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8spk8"]
Oct 13 13:21:07 crc kubenswrapper[4684]: W1013 13:21:07.342258 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4262fc7d_7332_42ac_9a54_06e4dc3c4d46.slice/crio-92c82744fc9b1762c9c2b7034d17a0d485c58bcd84a102cf1ac0dc899c63d347 WatchSource:0}: Error finding container 92c82744fc9b1762c9c2b7034d17a0d485c58bcd84a102cf1ac0dc899c63d347: Status 404 returned error can't find the container with id 92c82744fc9b1762c9c2b7034d17a0d485c58bcd84a102cf1ac0dc899c63d347
Oct 13 13:21:07 crc kubenswrapper[4684]: I1013 13:21:07.360767 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8spk8" event={"ID":"4262fc7d-7332-42ac-9a54-06e4dc3c4d46","Type":"ContainerStarted","Data":"92c82744fc9b1762c9c2b7034d17a0d485c58bcd84a102cf1ac0dc899c63d347"}
Oct 13 13:21:08 crc kubenswrapper[4684]: I1013 13:21:08.368008 4684 generic.go:334] "Generic (PLEG): container finished" podID="4262fc7d-7332-42ac-9a54-06e4dc3c4d46" containerID="c1c597bf498a01db939d190c78b9347183548d0a6594bec7a65f70adbeaba7a0" exitCode=0
Oct 13 13:21:08 crc kubenswrapper[4684]: I1013 13:21:08.368048 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8spk8" event={"ID":"4262fc7d-7332-42ac-9a54-06e4dc3c4d46","Type":"ContainerDied","Data":"c1c597bf498a01db939d190c78b9347183548d0a6594bec7a65f70adbeaba7a0"}
Oct 13 13:21:12 crc kubenswrapper[4684]: I1013 13:21:12.401610 4684 generic.go:334] "Generic (PLEG): container finished" podID="4262fc7d-7332-42ac-9a54-06e4dc3c4d46" containerID="b85f36f2452d60bffff588430ee168eccd69c4563ed8876268b4778c0ae56f68" exitCode=0
Oct 13 13:21:12 crc kubenswrapper[4684]: I1013 13:21:12.401730 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8spk8" event={"ID":"4262fc7d-7332-42ac-9a54-06e4dc3c4d46","Type":"ContainerDied","Data":"b85f36f2452d60bffff588430ee168eccd69c4563ed8876268b4778c0ae56f68"}
Oct 13 13:21:13 crc kubenswrapper[4684]: I1013 13:21:13.417097 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8spk8" event={"ID":"4262fc7d-7332-42ac-9a54-06e4dc3c4d46","Type":"ContainerStarted","Data":"2eefed75529bb36003bc654b02f2d10d5a6f834d0caaead6302cc50e5e19d134"}
Oct 13 13:21:13 crc kubenswrapper[4684]: I1013 13:21:13.436443 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8spk8" podStartSLOduration=2.828580109 podStartE2EDuration="7.436423774s" podCreationTimestamp="2025-10-13 13:21:06 +0000 UTC" firstStartedPulling="2025-10-13 13:21:08.370143608 +0000 UTC m=+822.937527678" lastFinishedPulling="2025-10-13 13:21:12.977987263 +0000 UTC m=+827.545371343" observedRunningTime="2025-10-13 13:21:13.431889531 +0000 UTC m=+827.999273601" watchObservedRunningTime="2025-10-13 13:21:13.436423774 +0000 UTC m=+828.003807844"
Oct 13 13:21:16 crc kubenswrapper[4684]: I1013 13:21:16.801619 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8spk8"
Oct 13 13:21:16 crc kubenswrapper[4684]: I1013 13:21:16.802201 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8spk8"
Oct 13 13:21:16 crc kubenswrapper[4684]: I1013 13:21:16.843146 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8spk8"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.104501 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-c9cdf56f7-g6gv9"]
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.105982 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c9cdf56f7-g6gv9"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.108339 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-r7x9v"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.108493 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.113966 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c9cdf56f7-g6gv9"]
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.114221 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.114223 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.153555 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7975cd48fc-fzzwb"]
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.155745 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.159189 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.162833 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7975cd48fc-fzzwb"]
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.274036 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/294b7604-06b5-46f9-84fb-5e9ffd82f705-config\") pod \"dnsmasq-dns-c9cdf56f7-g6gv9\" (UID: \"294b7604-06b5-46f9-84fb-5e9ffd82f705\") " pod="openstack/dnsmasq-dns-c9cdf56f7-g6gv9"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.274086 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c677329-7994-4fb3-9ecb-777589a3870c-config\") pod \"dnsmasq-dns-7975cd48fc-fzzwb\" (UID: \"6c677329-7994-4fb3-9ecb-777589a3870c\") " pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.274130 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8452\" (UniqueName: \"kubernetes.io/projected/294b7604-06b5-46f9-84fb-5e9ffd82f705-kube-api-access-w8452\") pod \"dnsmasq-dns-c9cdf56f7-g6gv9\" (UID: \"294b7604-06b5-46f9-84fb-5e9ffd82f705\") " pod="openstack/dnsmasq-dns-c9cdf56f7-g6gv9"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.274176 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c677329-7994-4fb3-9ecb-777589a3870c-dns-svc\") pod \"dnsmasq-dns-7975cd48fc-fzzwb\" (UID: \"6c677329-7994-4fb3-9ecb-777589a3870c\") " pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.274210 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82d7m\" (UniqueName: \"kubernetes.io/projected/6c677329-7994-4fb3-9ecb-777589a3870c-kube-api-access-82d7m\") pod \"dnsmasq-dns-7975cd48fc-fzzwb\" (UID: \"6c677329-7994-4fb3-9ecb-777589a3870c\") " pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.375558 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82d7m\" (UniqueName: \"kubernetes.io/projected/6c677329-7994-4fb3-9ecb-777589a3870c-kube-api-access-82d7m\") pod \"dnsmasq-dns-7975cd48fc-fzzwb\" (UID: \"6c677329-7994-4fb3-9ecb-777589a3870c\") " pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.375643 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/294b7604-06b5-46f9-84fb-5e9ffd82f705-config\") pod \"dnsmasq-dns-c9cdf56f7-g6gv9\" (UID: \"294b7604-06b5-46f9-84fb-5e9ffd82f705\") " pod="openstack/dnsmasq-dns-c9cdf56f7-g6gv9"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.375661 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c677329-7994-4fb3-9ecb-777589a3870c-config\") pod \"dnsmasq-dns-7975cd48fc-fzzwb\" (UID: \"6c677329-7994-4fb3-9ecb-777589a3870c\") " pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.375679 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8452\" (UniqueName: \"kubernetes.io/projected/294b7604-06b5-46f9-84fb-5e9ffd82f705-kube-api-access-w8452\") pod \"dnsmasq-dns-c9cdf56f7-g6gv9\" (UID: \"294b7604-06b5-46f9-84fb-5e9ffd82f705\") " pod="openstack/dnsmasq-dns-c9cdf56f7-g6gv9"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.375722 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c677329-7994-4fb3-9ecb-777589a3870c-dns-svc\") pod \"dnsmasq-dns-7975cd48fc-fzzwb\" (UID: \"6c677329-7994-4fb3-9ecb-777589a3870c\") " pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.376700 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c677329-7994-4fb3-9ecb-777589a3870c-dns-svc\") pod \"dnsmasq-dns-7975cd48fc-fzzwb\" (UID: \"6c677329-7994-4fb3-9ecb-777589a3870c\") " pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.377072 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/294b7604-06b5-46f9-84fb-5e9ffd82f705-config\") pod \"dnsmasq-dns-c9cdf56f7-g6gv9\" (UID: \"294b7604-06b5-46f9-84fb-5e9ffd82f705\") " pod="openstack/dnsmasq-dns-c9cdf56f7-g6gv9"
Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.377396 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c677329-7994-4fb3-9ecb-777589a3870c-config\") pod \"dnsmasq-dns-7975cd48fc-fzzwb\" (UID: \"6c677329-7994-4fb3-9ecb-777589a3870c\") " pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb"
pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb" Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.408023 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8452\" (UniqueName: \"kubernetes.io/projected/294b7604-06b5-46f9-84fb-5e9ffd82f705-kube-api-access-w8452\") pod \"dnsmasq-dns-c9cdf56f7-g6gv9\" (UID: \"294b7604-06b5-46f9-84fb-5e9ffd82f705\") " pod="openstack/dnsmasq-dns-c9cdf56f7-g6gv9" Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.408287 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82d7m\" (UniqueName: \"kubernetes.io/projected/6c677329-7994-4fb3-9ecb-777589a3870c-kube-api-access-82d7m\") pod \"dnsmasq-dns-7975cd48fc-fzzwb\" (UID: \"6c677329-7994-4fb3-9ecb-777589a3870c\") " pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb" Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.428338 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c9cdf56f7-g6gv9" Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.477023 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb" Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.938860 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c9cdf56f7-g6gv9"] Oct 13 13:21:19 crc kubenswrapper[4684]: I1013 13:21:19.952893 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7975cd48fc-fzzwb"] Oct 13 13:21:19 crc kubenswrapper[4684]: W1013 13:21:19.956857 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c677329_7994_4fb3_9ecb_777589a3870c.slice/crio-f5969d96f549d00fe98e90168f3b080bc5016eb122b5dd99a3f509fbf306f199 WatchSource:0}: Error finding container f5969d96f549d00fe98e90168f3b080bc5016eb122b5dd99a3f509fbf306f199: Status 404 returned error can't find the container with id f5969d96f549d00fe98e90168f3b080bc5016eb122b5dd99a3f509fbf306f199 Oct 13 13:21:20 crc kubenswrapper[4684]: I1013 13:21:20.473183 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c9cdf56f7-g6gv9" event={"ID":"294b7604-06b5-46f9-84fb-5e9ffd82f705","Type":"ContainerStarted","Data":"b57e8fb03e18be837fd89f964134ebece16795354467895a9d4377a47dca3b3d"} Oct 13 13:21:20 crc kubenswrapper[4684]: I1013 13:21:20.479679 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb" event={"ID":"6c677329-7994-4fb3-9ecb-777589a3870c","Type":"ContainerStarted","Data":"f5969d96f549d00fe98e90168f3b080bc5016eb122b5dd99a3f509fbf306f199"} Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.389162 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7975cd48fc-fzzwb"] Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.420538 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-65966b547c-f2sgb"] Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.421995 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-65966b547c-f2sgb" Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.452868 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65966b547c-f2sgb"] Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.520512 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3266d343-4fda-44d5-8e12-f44de29d6600-dns-svc\") pod \"dnsmasq-dns-65966b547c-f2sgb\" (UID: \"3266d343-4fda-44d5-8e12-f44de29d6600\") " pod="openstack/dnsmasq-dns-65966b547c-f2sgb" Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.520599 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh64v\" (UniqueName: \"kubernetes.io/projected/3266d343-4fda-44d5-8e12-f44de29d6600-kube-api-access-hh64v\") pod \"dnsmasq-dns-65966b547c-f2sgb\" (UID: \"3266d343-4fda-44d5-8e12-f44de29d6600\") " pod="openstack/dnsmasq-dns-65966b547c-f2sgb" Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.520650 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3266d343-4fda-44d5-8e12-f44de29d6600-config\") pod \"dnsmasq-dns-65966b547c-f2sgb\" (UID: \"3266d343-4fda-44d5-8e12-f44de29d6600\") " pod="openstack/dnsmasq-dns-65966b547c-f2sgb" Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.626700 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3266d343-4fda-44d5-8e12-f44de29d6600-config\") pod \"dnsmasq-dns-65966b547c-f2sgb\" (UID: \"3266d343-4fda-44d5-8e12-f44de29d6600\") " pod="openstack/dnsmasq-dns-65966b547c-f2sgb" Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.626795 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3266d343-4fda-44d5-8e12-f44de29d6600-dns-svc\") pod \"dnsmasq-dns-65966b547c-f2sgb\" (UID: \"3266d343-4fda-44d5-8e12-f44de29d6600\") " pod="openstack/dnsmasq-dns-65966b547c-f2sgb" Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.626860 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh64v\" (UniqueName: \"kubernetes.io/projected/3266d343-4fda-44d5-8e12-f44de29d6600-kube-api-access-hh64v\") pod \"dnsmasq-dns-65966b547c-f2sgb\" (UID: \"3266d343-4fda-44d5-8e12-f44de29d6600\") " pod="openstack/dnsmasq-dns-65966b547c-f2sgb" Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.628460 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3266d343-4fda-44d5-8e12-f44de29d6600-dns-svc\") pod \"dnsmasq-dns-65966b547c-f2sgb\" (UID: \"3266d343-4fda-44d5-8e12-f44de29d6600\") " pod="openstack/dnsmasq-dns-65966b547c-f2sgb" Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.628498 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3266d343-4fda-44d5-8e12-f44de29d6600-config\") pod \"dnsmasq-dns-65966b547c-f2sgb\" (UID: \"3266d343-4fda-44d5-8e12-f44de29d6600\") " pod="openstack/dnsmasq-dns-65966b547c-f2sgb" Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.663751 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh64v\" (UniqueName: 
\"kubernetes.io/projected/3266d343-4fda-44d5-8e12-f44de29d6600-kube-api-access-hh64v\") pod \"dnsmasq-dns-65966b547c-f2sgb\" (UID: \"3266d343-4fda-44d5-8e12-f44de29d6600\") " pod="openstack/dnsmasq-dns-65966b547c-f2sgb" Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.697486 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c9cdf56f7-g6gv9"] Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.749213 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57486d8b9f-fzmv4"] Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.750574 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.760942 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65966b547c-f2sgb" Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.769970 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57486d8b9f-fzmv4"] Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.930194 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9t29\" (UniqueName: \"kubernetes.io/projected/38c94262-1b0c-4b65-939b-f3ea4171513f-kube-api-access-m9t29\") pod \"dnsmasq-dns-57486d8b9f-fzmv4\" (UID: \"38c94262-1b0c-4b65-939b-f3ea4171513f\") " pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.930546 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38c94262-1b0c-4b65-939b-f3ea4171513f-config\") pod \"dnsmasq-dns-57486d8b9f-fzmv4\" (UID: \"38c94262-1b0c-4b65-939b-f3ea4171513f\") " pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" Oct 13 13:21:22 crc kubenswrapper[4684]: I1013 13:21:22.930578 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38c94262-1b0c-4b65-939b-f3ea4171513f-dns-svc\") pod \"dnsmasq-dns-57486d8b9f-fzmv4\" (UID: \"38c94262-1b0c-4b65-939b-f3ea4171513f\") " pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.031680 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9t29\" (UniqueName: \"kubernetes.io/projected/38c94262-1b0c-4b65-939b-f3ea4171513f-kube-api-access-m9t29\") pod \"dnsmasq-dns-57486d8b9f-fzmv4\" (UID: \"38c94262-1b0c-4b65-939b-f3ea4171513f\") " pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.031759 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38c94262-1b0c-4b65-939b-f3ea4171513f-config\") pod \"dnsmasq-dns-57486d8b9f-fzmv4\" (UID: \"38c94262-1b0c-4b65-939b-f3ea4171513f\") " pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.031796 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38c94262-1b0c-4b65-939b-f3ea4171513f-dns-svc\") pod \"dnsmasq-dns-57486d8b9f-fzmv4\" (UID: \"38c94262-1b0c-4b65-939b-f3ea4171513f\") " pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.034516 4684 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38c94262-1b0c-4b65-939b-f3ea4171513f-dns-svc\") pod \"dnsmasq-dns-57486d8b9f-fzmv4\" (UID: \"38c94262-1b0c-4b65-939b-f3ea4171513f\") " pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.034528 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38c94262-1b0c-4b65-939b-f3ea4171513f-config\") pod \"dnsmasq-dns-57486d8b9f-fzmv4\" (UID: \"38c94262-1b0c-4b65-939b-f3ea4171513f\") " pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.050062 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9t29\" (UniqueName: \"kubernetes.io/projected/38c94262-1b0c-4b65-939b-f3ea4171513f-kube-api-access-m9t29\") pod \"dnsmasq-dns-57486d8b9f-fzmv4\" (UID: \"38c94262-1b0c-4b65-939b-f3ea4171513f\") " pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.082777 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.263855 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65966b547c-f2sgb"] Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.524042 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57486d8b9f-fzmv4"] Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.524482 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65966b547c-f2sgb" event={"ID":"3266d343-4fda-44d5-8e12-f44de29d6600","Type":"ContainerStarted","Data":"f6a60e5df06a74e0e4bc09ff262f4389d110eb17633642d11f09d41b149ece24"} Oct 13 13:21:23 crc kubenswrapper[4684]: W1013 13:21:23.534994 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38c94262_1b0c_4b65_939b_f3ea4171513f.slice/crio-86524202f713792ef4f528c878a9b5afc4155da502f8536c8e83873e9ef13db7 WatchSource:0}: Error finding container 86524202f713792ef4f528c878a9b5afc4155da502f8536c8e83873e9ef13db7: Status 404 returned error can't find the container with id 86524202f713792ef4f528c878a9b5afc4155da502f8536c8e83873e9ef13db7 Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.557726 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.559425 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.564945 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.570754 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.571002 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.571136 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.571330 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-njm7l" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.571417 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.571714 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.572985 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.744251 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.744309 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1b29378a-0de1-402e-993a-a83cc3d41b67-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.744336 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.744361 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwlk2\" (UniqueName: \"kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-kube-api-access-pwlk2\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.744386 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.744414 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.744437 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1b29378a-0de1-402e-993a-a83cc3d41b67-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.744478 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.744525 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-server-conf\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.744693 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.744760 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-config-data\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.846530 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.847078 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-server-conf\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.847116 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.847140 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-config-data\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 
Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.847176 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.847203 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1b29378a-0de1-402e-993a-a83cc3d41b67-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.847229 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.847253 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwlk2\" (UniqueName: \"kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-kube-api-access-pwlk2\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.847274 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.847300 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.847438 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1b29378a-0de1-402e-993a-a83cc3d41b67-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.848962 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0"
Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.849728 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.849889 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0"
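[editor's note] For the local volume above, mounting is two-phase: MountVolume.MountDevice prepares one node-wide path (here /mnt/openstack/pv06), and MountVolume.SetUp then exposes it under each consuming pod's volumes directory. A sketch of the path construction, assuming the "kubernetes.io~<plugin>" directory escaping visible elsewhere in this log; the exact layout is our reconstruction, not output of a kubelet API:

package main

import "fmt"

func main() {
	const (
		pv     = "local-storage06-crc"
		uid    = "1b29378a-0de1-402e-993a-a83cc3d41b67"
		device = "/mnt/openstack/pv06" // from the MountDevice entry above
	)
	podPath := fmt.Sprintf("/var/lib/kubelet/pods/%s/volumes/kubernetes.io~local-volume/%s", uid, pv)
	fmt.Println(device, "->", podPath) // SetUp makes the device path visible at podPath
}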
\"kubernetes.io/empty-dir/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.850053 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-server-conf\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.850180 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.850488 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-config-data\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.855800 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1b29378a-0de1-402e-993a-a83cc3d41b67-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.855819 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.856840 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.864926 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1b29378a-0de1-402e-993a-a83cc3d41b67-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.868226 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.877758 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwlk2\" (UniqueName: \"kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-kube-api-access-pwlk2\") pod \"rabbitmq-server-0\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") " pod="openstack/rabbitmq-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.881783 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 
13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.883117 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.887310 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.890574 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.890794 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.890927 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.891038 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-vsbcb" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.891151 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.892363 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.892469 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 13 13:21:23 crc kubenswrapper[4684]: I1013 13:21:23.907144 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.050967 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.051122 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.051201 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.051241 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c869998b-76f5-409d-9ff4-4abe3f7c9289-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.051311 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.051371 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c869998b-76f5-409d-9ff4-4abe3f7c9289-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.051416 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.051458 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.051479 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.051493 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.051507 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkq8m\" (UniqueName: \"kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-kube-api-access-rkq8m\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.152946 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.152997 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c869998b-76f5-409d-9ff4-4abe3f7c9289-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.153026 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.153052 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.153070 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkq8m\" (UniqueName: \"kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-kube-api-access-rkq8m\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.153087 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.153100 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.153152 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.153168 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.153187 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.153643 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c869998b-76f5-409d-9ff4-4abe3f7c9289-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.159491 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.160379 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.160688 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.161425 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.161555 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.162114 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.163810 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.166577 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.168449 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c869998b-76f5-409d-9ff4-4abe3f7c9289-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.168854 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c869998b-76f5-409d-9ff4-4abe3f7c9289-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.180487 4684 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rkq8m\" (UniqueName: \"kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-kube-api-access-rkq8m\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.193652 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.240288 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.430792 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 13 13:21:24 crc kubenswrapper[4684]: I1013 13:21:24.532052 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" event={"ID":"38c94262-1b0c-4b65-939b-f3ea4171513f","Type":"ContainerStarted","Data":"86524202f713792ef4f528c878a9b5afc4155da502f8536c8e83873e9ef13db7"} Oct 13 13:21:24 crc kubenswrapper[4684]: W1013 13:21:24.942666 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b29378a_0de1_402e_993a_a83cc3d41b67.slice/crio-c97587bef6c1903630ab03f7b04b416310f10567d0380856dfafac402dbb08de WatchSource:0}: Error finding container c97587bef6c1903630ab03f7b04b416310f10567d0380856dfafac402dbb08de: Status 404 returned error can't find the container with id c97587bef6c1903630ab03f7b04b416310f10567d0380856dfafac402dbb08de Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.315768 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.323078 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.327167 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-d22tl" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.327645 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.327945 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.328342 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.330009 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.338161 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.347143 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.446284 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.505250 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d4341d2-642e-4c4c-b517-edb89e87d1f0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.505324 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4d4341d2-642e-4c4c-b517-edb89e87d1f0-kolla-config\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.505355 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d4341d2-642e-4c4c-b517-edb89e87d1f0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.505528 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7bkr\" (UniqueName: \"kubernetes.io/projected/4d4341d2-642e-4c4c-b517-edb89e87d1f0-kube-api-access-b7bkr\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.505672 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4d4341d2-642e-4c4c-b517-edb89e87d1f0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.505828 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/4d4341d2-642e-4c4c-b517-edb89e87d1f0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.505975 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4d4341d2-642e-4c4c-b517-edb89e87d1f0-secrets\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.506126 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4d4341d2-642e-4c4c-b517-edb89e87d1f0-config-data-default\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.506845 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.540673 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1b29378a-0de1-402e-993a-a83cc3d41b67","Type":"ContainerStarted","Data":"c97587bef6c1903630ab03f7b04b416310f10567d0380856dfafac402dbb08de"} Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.609042 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4d4341d2-642e-4c4c-b517-edb89e87d1f0-config-data-default\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.609142 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.609182 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d4341d2-642e-4c4c-b517-edb89e87d1f0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.609206 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4d4341d2-642e-4c4c-b517-edb89e87d1f0-kolla-config\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.609285 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d4341d2-642e-4c4c-b517-edb89e87d1f0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.609348 4684 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7bkr\" (UniqueName: \"kubernetes.io/projected/4d4341d2-642e-4c4c-b517-edb89e87d1f0-kube-api-access-b7bkr\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.609406 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4d4341d2-642e-4c4c-b517-edb89e87d1f0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.609498 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d4341d2-642e-4c4c-b517-edb89e87d1f0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.609569 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4d4341d2-642e-4c4c-b517-edb89e87d1f0-secrets\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.610223 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.610555 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4d4341d2-642e-4c4c-b517-edb89e87d1f0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.610952 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4d4341d2-642e-4c4c-b517-edb89e87d1f0-kolla-config\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.611422 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4d4341d2-642e-4c4c-b517-edb89e87d1f0-config-data-default\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.611589 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d4341d2-642e-4c4c-b517-edb89e87d1f0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.622494 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d4341d2-642e-4c4c-b517-edb89e87d1f0-galera-tls-certs\") pod \"openstack-galera-0\" 
(UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.629908 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d4341d2-642e-4c4c-b517-edb89e87d1f0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.638616 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4d4341d2-642e-4c4c-b517-edb89e87d1f0-secrets\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.646157 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7bkr\" (UniqueName: \"kubernetes.io/projected/4d4341d2-642e-4c4c-b517-edb89e87d1f0-kube-api-access-b7bkr\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.657836 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"4d4341d2-642e-4c4c-b517-edb89e87d1f0\") " pod="openstack/openstack-galera-0" Oct 13 13:21:25 crc kubenswrapper[4684]: I1013 13:21:25.946571 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.731521 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.732983 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.735630 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-rdd9n" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.735796 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.735808 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.750512 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.762374 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.799540 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.801860 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.806282 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.806418 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.806554 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-4fnw6" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.809669 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.836759 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ab39473e-47b6-4570-b1f6-f81ee811c19f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.836862 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.836890 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ab39473e-47b6-4570-b1f6-f81ee811c19f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.836957 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab39473e-47b6-4570-b1f6-f81ee811c19f-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.836981 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfs6n\" (UniqueName: \"kubernetes.io/projected/ab39473e-47b6-4570-b1f6-f81ee811c19f-kube-api-access-zfs6n\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.837261 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ab39473e-47b6-4570-b1f6-f81ee811c19f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.837301 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab39473e-47b6-4570-b1f6-f81ee811c19f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc 
kubenswrapper[4684]: I1013 13:21:26.837349 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/ab39473e-47b6-4570-b1f6-f81ee811c19f-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.837369 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab39473e-47b6-4570-b1f6-f81ee811c19f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.891923 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8spk8" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.939639 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73377942-2512-4398-b6ca-25aa9a591619-combined-ca-bundle\") pod \"memcached-0\" (UID: \"73377942-2512-4398-b6ca-25aa9a591619\") " pod="openstack/memcached-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.939750 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.939819 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ab39473e-47b6-4570-b1f6-f81ee811c19f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.939845 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab39473e-47b6-4570-b1f6-f81ee811c19f-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.939867 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfs6n\" (UniqueName: \"kubernetes.io/projected/ab39473e-47b6-4570-b1f6-f81ee811c19f-kube-api-access-zfs6n\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.939912 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/73377942-2512-4398-b6ca-25aa9a591619-kolla-config\") pod \"memcached-0\" (UID: \"73377942-2512-4398-b6ca-25aa9a591619\") " pod="openstack/memcached-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.939955 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7s95\" (UniqueName: \"kubernetes.io/projected/73377942-2512-4398-b6ca-25aa9a591619-kube-api-access-b7s95\") pod \"memcached-0\" (UID: 
\"73377942-2512-4398-b6ca-25aa9a591619\") " pod="openstack/memcached-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.940016 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ab39473e-47b6-4570-b1f6-f81ee811c19f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.940057 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab39473e-47b6-4570-b1f6-f81ee811c19f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.940084 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/73377942-2512-4398-b6ca-25aa9a591619-config-data\") pod \"memcached-0\" (UID: \"73377942-2512-4398-b6ca-25aa9a591619\") " pod="openstack/memcached-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.940113 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/ab39473e-47b6-4570-b1f6-f81ee811c19f-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.940144 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab39473e-47b6-4570-b1f6-f81ee811c19f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.940174 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/73377942-2512-4398-b6ca-25aa9a591619-memcached-tls-certs\") pod \"memcached-0\" (UID: \"73377942-2512-4398-b6ca-25aa9a591619\") " pod="openstack/memcached-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.940214 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ab39473e-47b6-4570-b1f6-f81ee811c19f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.941316 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ab39473e-47b6-4570-b1f6-f81ee811c19f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.941558 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 
13:21:26.942090 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ab39473e-47b6-4570-b1f6-f81ee811c19f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.943349 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ab39473e-47b6-4570-b1f6-f81ee811c19f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.947289 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab39473e-47b6-4570-b1f6-f81ee811c19f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.948539 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/ab39473e-47b6-4570-b1f6-f81ee811c19f-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.948848 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab39473e-47b6-4570-b1f6-f81ee811c19f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.959658 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab39473e-47b6-4570-b1f6-f81ee811c19f-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:26 crc kubenswrapper[4684]: I1013 13:21:26.968712 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfs6n\" (UniqueName: \"kubernetes.io/projected/ab39473e-47b6-4570-b1f6-f81ee811c19f-kube-api-access-zfs6n\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.018657 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"ab39473e-47b6-4570-b1f6-f81ee811c19f\") " pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.038185 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8spk8"] Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.041591 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73377942-2512-4398-b6ca-25aa9a591619-combined-ca-bundle\") pod \"memcached-0\" (UID: \"73377942-2512-4398-b6ca-25aa9a591619\") " pod="openstack/memcached-0" Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.041640 4684 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/73377942-2512-4398-b6ca-25aa9a591619-kolla-config\") pod \"memcached-0\" (UID: \"73377942-2512-4398-b6ca-25aa9a591619\") " pod="openstack/memcached-0" Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.041678 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7s95\" (UniqueName: \"kubernetes.io/projected/73377942-2512-4398-b6ca-25aa9a591619-kube-api-access-b7s95\") pod \"memcached-0\" (UID: \"73377942-2512-4398-b6ca-25aa9a591619\") " pod="openstack/memcached-0" Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.041726 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/73377942-2512-4398-b6ca-25aa9a591619-config-data\") pod \"memcached-0\" (UID: \"73377942-2512-4398-b6ca-25aa9a591619\") " pod="openstack/memcached-0" Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.041752 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/73377942-2512-4398-b6ca-25aa9a591619-memcached-tls-certs\") pod \"memcached-0\" (UID: \"73377942-2512-4398-b6ca-25aa9a591619\") " pod="openstack/memcached-0" Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.044581 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/73377942-2512-4398-b6ca-25aa9a591619-config-data\") pod \"memcached-0\" (UID: \"73377942-2512-4398-b6ca-25aa9a591619\") " pod="openstack/memcached-0" Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.045460 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/73377942-2512-4398-b6ca-25aa9a591619-kolla-config\") pod \"memcached-0\" (UID: \"73377942-2512-4398-b6ca-25aa9a591619\") " pod="openstack/memcached-0" Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.048198 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/73377942-2512-4398-b6ca-25aa9a591619-memcached-tls-certs\") pod \"memcached-0\" (UID: \"73377942-2512-4398-b6ca-25aa9a591619\") " pod="openstack/memcached-0" Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.048963 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73377942-2512-4398-b6ca-25aa9a591619-combined-ca-bundle\") pod \"memcached-0\" (UID: \"73377942-2512-4398-b6ca-25aa9a591619\") " pod="openstack/memcached-0" Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.059843 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.065515 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7s95\" (UniqueName: \"kubernetes.io/projected/73377942-2512-4398-b6ca-25aa9a591619-kube-api-access-b7s95\") pod \"memcached-0\" (UID: \"73377942-2512-4398-b6ca-25aa9a591619\") " pod="openstack/memcached-0" Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.121161 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9mv7b"] Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.121704 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9mv7b" podUID="22ed76a4-376f-43eb-89d1-995c25747c97" containerName="registry-server" containerID="cri-o://199c0d9eafd7c52aefb6233ae2d8b5b9a4927283432d65f1c1d0c84e55b6d136" gracePeriod=2 Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.150232 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.598086 4684 generic.go:334] "Generic (PLEG): container finished" podID="22ed76a4-376f-43eb-89d1-995c25747c97" containerID="199c0d9eafd7c52aefb6233ae2d8b5b9a4927283432d65f1c1d0c84e55b6d136" exitCode=0 Oct 13 13:21:27 crc kubenswrapper[4684]: I1013 13:21:27.598169 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mv7b" event={"ID":"22ed76a4-376f-43eb-89d1-995c25747c97","Type":"ContainerDied","Data":"199c0d9eafd7c52aefb6233ae2d8b5b9a4927283432d65f1c1d0c84e55b6d136"} Oct 13 13:21:28 crc kubenswrapper[4684]: I1013 13:21:28.729086 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 13 13:21:28 crc kubenswrapper[4684]: I1013 13:21:28.731268 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 13 13:21:28 crc kubenswrapper[4684]: I1013 13:21:28.735808 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 13 13:21:28 crc kubenswrapper[4684]: I1013 13:21:28.743061 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-gpb82" Oct 13 13:21:28 crc kubenswrapper[4684]: I1013 13:21:28.879749 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hgzp\" (UniqueName: \"kubernetes.io/projected/d9101813-710f-4c10-8510-415d62289ab1-kube-api-access-5hgzp\") pod \"kube-state-metrics-0\" (UID: \"d9101813-710f-4c10-8510-415d62289ab1\") " pod="openstack/kube-state-metrics-0" Oct 13 13:21:28 crc kubenswrapper[4684]: I1013 13:21:28.981240 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hgzp\" (UniqueName: \"kubernetes.io/projected/d9101813-710f-4c10-8510-415d62289ab1-kube-api-access-5hgzp\") pod \"kube-state-metrics-0\" (UID: \"d9101813-710f-4c10-8510-415d62289ab1\") " pod="openstack/kube-state-metrics-0" Oct 13 13:21:29 crc kubenswrapper[4684]: I1013 13:21:29.006343 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hgzp\" (UniqueName: \"kubernetes.io/projected/d9101813-710f-4c10-8510-415d62289ab1-kube-api-access-5hgzp\") pod \"kube-state-metrics-0\" (UID: \"d9101813-710f-4c10-8510-415d62289ab1\") " pod="openstack/kube-state-metrics-0" Oct 13 13:21:29 crc kubenswrapper[4684]: I1013 13:21:29.078637 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 13 13:21:30 crc kubenswrapper[4684]: W1013 13:21:30.915252 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc869998b_76f5_409d_9ff4_4abe3f7c9289.slice/crio-30d11a503f43caaa38147a704515c19add3431ff78c64105d56a4814b56b9e7e WatchSource:0}: Error finding container 30d11a503f43caaa38147a704515c19add3431ff78c64105d56a4814b56b9e7e: Status 404 returned error can't find the container with id 30d11a503f43caaa38147a704515c19add3431ff78c64105d56a4814b56b9e7e Oct 13 13:21:30 crc kubenswrapper[4684]: I1013 13:21:30.926391 4684 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 13 13:21:31 crc kubenswrapper[4684]: I1013 13:21:31.643364 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c869998b-76f5-409d-9ff4-4abe3f7c9289","Type":"ContainerStarted","Data":"30d11a503f43caaa38147a704515c19add3431ff78c64105d56a4814b56b9e7e"} Oct 13 13:21:32 crc kubenswrapper[4684]: I1013 13:21:32.013661 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9mv7b"
Oct 13 13:21:32 crc kubenswrapper[4684]: I1013 13:21:32.160614 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22ed76a4-376f-43eb-89d1-995c25747c97-utilities\") pod \"22ed76a4-376f-43eb-89d1-995c25747c97\" (UID: \"22ed76a4-376f-43eb-89d1-995c25747c97\") "
Oct 13 13:21:32 crc kubenswrapper[4684]: I1013 13:21:32.160772 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nf5k4\" (UniqueName: \"kubernetes.io/projected/22ed76a4-376f-43eb-89d1-995c25747c97-kube-api-access-nf5k4\") pod \"22ed76a4-376f-43eb-89d1-995c25747c97\" (UID: \"22ed76a4-376f-43eb-89d1-995c25747c97\") "
Oct 13 13:21:32 crc kubenswrapper[4684]: I1013 13:21:32.160857 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22ed76a4-376f-43eb-89d1-995c25747c97-catalog-content\") pod \"22ed76a4-376f-43eb-89d1-995c25747c97\" (UID: \"22ed76a4-376f-43eb-89d1-995c25747c97\") "
Oct 13 13:21:32 crc kubenswrapper[4684]: I1013 13:21:32.162812 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22ed76a4-376f-43eb-89d1-995c25747c97-utilities" (OuterVolumeSpecName: "utilities") pod "22ed76a4-376f-43eb-89d1-995c25747c97" (UID: "22ed76a4-376f-43eb-89d1-995c25747c97"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:21:32 crc kubenswrapper[4684]: I1013 13:21:32.167666 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22ed76a4-376f-43eb-89d1-995c25747c97-kube-api-access-nf5k4" (OuterVolumeSpecName: "kube-api-access-nf5k4") pod "22ed76a4-376f-43eb-89d1-995c25747c97" (UID: "22ed76a4-376f-43eb-89d1-995c25747c97"). InnerVolumeSpecName "kube-api-access-nf5k4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:21:32 crc kubenswrapper[4684]: I1013 13:21:32.230170 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22ed76a4-376f-43eb-89d1-995c25747c97-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "22ed76a4-376f-43eb-89d1-995c25747c97" (UID: "22ed76a4-376f-43eb-89d1-995c25747c97"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:21:32 crc kubenswrapper[4684]: I1013 13:21:32.262989 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22ed76a4-376f-43eb-89d1-995c25747c97-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 13 13:21:32 crc kubenswrapper[4684]: I1013 13:21:32.263034 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22ed76a4-376f-43eb-89d1-995c25747c97-utilities\") on node \"crc\" DevicePath \"\""
Oct 13 13:21:32 crc kubenswrapper[4684]: I1013 13:21:32.263050 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nf5k4\" (UniqueName: \"kubernetes.io/projected/22ed76a4-376f-43eb-89d1-995c25747c97-kube-api-access-nf5k4\") on node \"crc\" DevicePath \"\""
Oct 13 13:21:32 crc kubenswrapper[4684]: I1013 13:21:32.376613 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Oct 13 13:21:32 crc kubenswrapper[4684]: I1013 13:21:32.657509 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mv7b" event={"ID":"22ed76a4-376f-43eb-89d1-995c25747c97","Type":"ContainerDied","Data":"a04d04f26390e47115b20381a85591c191c47558c65cf6000999a83cd1336074"}
Oct 13 13:21:32 crc kubenswrapper[4684]: I1013 13:21:32.657833 4684 scope.go:117] "RemoveContainer" containerID="199c0d9eafd7c52aefb6233ae2d8b5b9a4927283432d65f1c1d0c84e55b6d136"
Oct 13 13:21:32 crc kubenswrapper[4684]: I1013 13:21:32.657924 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9mv7b"
Oct 13 13:21:32 crc kubenswrapper[4684]: I1013 13:21:32.677458 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9mv7b"]
Oct 13 13:21:32 crc kubenswrapper[4684]: I1013 13:21:32.684566 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9mv7b"]
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.050222 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jgthj"]
Oct 13 13:21:33 crc kubenswrapper[4684]: E1013 13:21:33.050593 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22ed76a4-376f-43eb-89d1-995c25747c97" containerName="registry-server"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.050609 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="22ed76a4-376f-43eb-89d1-995c25747c97" containerName="registry-server"
Oct 13 13:21:33 crc kubenswrapper[4684]: E1013 13:21:33.050630 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22ed76a4-376f-43eb-89d1-995c25747c97" containerName="extract-utilities"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.050637 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="22ed76a4-376f-43eb-89d1-995c25747c97" containerName="extract-utilities"
Oct 13 13:21:33 crc kubenswrapper[4684]: E1013 13:21:33.050661 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22ed76a4-376f-43eb-89d1-995c25747c97" containerName="extract-content"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.050671 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="22ed76a4-376f-43eb-89d1-995c25747c97" containerName="extract-content"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.050850 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="22ed76a4-376f-43eb-89d1-995c25747c97" containerName="registry-server"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.051482 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.055928 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.056154 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.056918 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-ks5vs"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.072989 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-fc6hl"]
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.081429 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.118836 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jgthj"]
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.152055 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-fc6hl"]
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.187507 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a984144e-7322-4045-a696-7ec4b746e061-var-log-ovn\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.187590 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/2b902966-f950-4166-b594-afacd52e5346-var-lib\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.187615 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/2b902966-f950-4166-b594-afacd52e5346-etc-ovs\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.187651 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ffch\" (UniqueName: \"kubernetes.io/projected/2b902966-f950-4166-b594-afacd52e5346-kube-api-access-8ffch\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.187688 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/a984144e-7322-4045-a696-7ec4b746e061-ovn-controller-tls-certs\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.187753 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a984144e-7322-4045-a696-7ec4b746e061-combined-ca-bundle\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.187788 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a984144e-7322-4045-a696-7ec4b746e061-var-run-ovn\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.187819 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/2b902966-f950-4166-b594-afacd52e5346-var-log\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.187850 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a984144e-7322-4045-a696-7ec4b746e061-scripts\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.187876 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2b902966-f950-4166-b594-afacd52e5346-var-run\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.187919 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2b902966-f950-4166-b594-afacd52e5346-scripts\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.187961 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a984144e-7322-4045-a696-7ec4b746e061-var-run\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.187990 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr4d2\" (UniqueName: \"kubernetes.io/projected/a984144e-7322-4045-a696-7ec4b746e061-kube-api-access-zr4d2\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.289499 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2b902966-f950-4166-b594-afacd52e5346-scripts\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.289600 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a984144e-7322-4045-a696-7ec4b746e061-var-run\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.289632 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr4d2\" (UniqueName: \"kubernetes.io/projected/a984144e-7322-4045-a696-7ec4b746e061-kube-api-access-zr4d2\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.289670 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a984144e-7322-4045-a696-7ec4b746e061-var-log-ovn\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.289712 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/2b902966-f950-4166-b594-afacd52e5346-var-lib\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.289740 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/2b902966-f950-4166-b594-afacd52e5346-etc-ovs\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.289773 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ffch\" (UniqueName: \"kubernetes.io/projected/2b902966-f950-4166-b594-afacd52e5346-kube-api-access-8ffch\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.289800 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/a984144e-7322-4045-a696-7ec4b746e061-ovn-controller-tls-certs\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.289829 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a984144e-7322-4045-a696-7ec4b746e061-combined-ca-bundle\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.289853 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a984144e-7322-4045-a696-7ec4b746e061-var-run-ovn\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.289879 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/2b902966-f950-4166-b594-afacd52e5346-var-log\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.289926 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a984144e-7322-4045-a696-7ec4b746e061-scripts\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.289956 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2b902966-f950-4166-b594-afacd52e5346-var-run\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.290453 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a984144e-7322-4045-a696-7ec4b746e061-var-log-ovn\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.290758 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a984144e-7322-4045-a696-7ec4b746e061-var-run-ovn\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.291129 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/2b902966-f950-4166-b594-afacd52e5346-var-log\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.291242 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a984144e-7322-4045-a696-7ec4b746e061-var-run\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.291347 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/2b902966-f950-4166-b594-afacd52e5346-var-lib\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.291370 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2b902966-f950-4166-b594-afacd52e5346-var-run\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.292274 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a984144e-7322-4045-a696-7ec4b746e061-scripts\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.292430 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/2b902966-f950-4166-b594-afacd52e5346-etc-ovs\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.294477 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2b902966-f950-4166-b594-afacd52e5346-scripts\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.306992 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/a984144e-7322-4045-a696-7ec4b746e061-ovn-controller-tls-certs\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.307467 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a984144e-7322-4045-a696-7ec4b746e061-combined-ca-bundle\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.308793 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr4d2\" (UniqueName: \"kubernetes.io/projected/a984144e-7322-4045-a696-7ec4b746e061-kube-api-access-zr4d2\") pod \"ovn-controller-jgthj\" (UID: \"a984144e-7322-4045-a696-7ec4b746e061\") " pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.311045 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ffch\" (UniqueName: \"kubernetes.io/projected/2b902966-f950-4166-b594-afacd52e5346-kube-api-access-8ffch\") pod \"ovn-controller-ovs-fc6hl\" (UID: \"2b902966-f950-4166-b594-afacd52e5346\") " pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.398863 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:33 crc kubenswrapper[4684]: I1013 13:21:33.440711 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-fc6hl"
Oct 13 13:21:34 crc kubenswrapper[4684]: I1013 13:21:34.363104 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22ed76a4-376f-43eb-89d1-995c25747c97" path="/var/lib/kubelet/pods/22ed76a4-376f-43eb-89d1-995c25747c97/volumes"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.077505 4684 scope.go:117] "RemoveContainer" containerID="199c0d9eafd7c52aefb6233ae2d8b5b9a4927283432d65f1c1d0c84e55b6d136"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.704058 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.705753 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.710867 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.713783 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.714047 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.714196 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.714414 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-xhjkp"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.714621 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.833083 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.833146 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.833177 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.833276 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-config\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.833329 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.833421 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.833471 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.833510 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29ct4\" (UniqueName: \"kubernetes.io/projected/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-kube-api-access-29ct4\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.892074 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.896328 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.899316 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.899735 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.900836 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.902018 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-5rgj5"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.903355 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.935342 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.935403 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29ct4\" (UniqueName: \"kubernetes.io/projected/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-kube-api-access-29ct4\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.935473 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.935498 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.935514 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.935539 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-config\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.935558 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.935585 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.935892 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.935972 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.937518 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.937519 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-config\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.941567 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.941666 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.941915 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.952241 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29ct4\" (UniqueName: \"kubernetes.io/projected/40f06c76-a8c8-4f1c-a8b4-49fe81d3912e-kube-api-access-29ct4\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:35 crc kubenswrapper[4684]: I1013 13:21:35.970050 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e\") " pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.034437 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.039591 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b199f677-58e1-4a16-b904-5517b06a2b5e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.039637 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b199f677-58e1-4a16-b904-5517b06a2b5e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.039684 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.039706 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b199f677-58e1-4a16-b904-5517b06a2b5e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.039724 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b199f677-58e1-4a16-b904-5517b06a2b5e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.039744 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b199f677-58e1-4a16-b904-5517b06a2b5e-config\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.039765 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b199f677-58e1-4a16-b904-5517b06a2b5e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.039787 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7tf8\" (UniqueName: \"kubernetes.io/projected/b199f677-58e1-4a16-b904-5517b06a2b5e-kube-api-access-c7tf8\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.141423 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b199f677-58e1-4a16-b904-5517b06a2b5e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.141501 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b199f677-58e1-4a16-b904-5517b06a2b5e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.141579 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.141607 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b199f677-58e1-4a16-b904-5517b06a2b5e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.141836 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.143050 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b199f677-58e1-4a16-b904-5517b06a2b5e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.144113 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b199f677-58e1-4a16-b904-5517b06a2b5e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.144149 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b199f677-58e1-4a16-b904-5517b06a2b5e-config\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.144179 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b199f677-58e1-4a16-b904-5517b06a2b5e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.144211 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7tf8\" (UniqueName: \"kubernetes.io/projected/b199f677-58e1-4a16-b904-5517b06a2b5e-kube-api-access-c7tf8\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.145078 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b199f677-58e1-4a16-b904-5517b06a2b5e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.145439 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b199f677-58e1-4a16-b904-5517b06a2b5e-config\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.146386 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b199f677-58e1-4a16-b904-5517b06a2b5e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.147195 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b199f677-58e1-4a16-b904-5517b06a2b5e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.148518 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b199f677-58e1-4a16-b904-5517b06a2b5e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.164980 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.173141 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7tf8\" (UniqueName: \"kubernetes.io/projected/b199f677-58e1-4a16-b904-5517b06a2b5e-kube-api-access-c7tf8\") pod \"ovsdbserver-sb-0\" (UID: \"b199f677-58e1-4a16-b904-5517b06a2b5e\") " pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:36 crc kubenswrapper[4684]: I1013 13:21:36.219221 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:37 crc kubenswrapper[4684]: W1013 13:21:37.221875 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73377942_2512_4398_b6ca_25aa9a591619.slice/crio-a1bb232f89d9b9df5f76bce3bdd643840a602da7e38332b35c7c23a937312ef1 WatchSource:0}: Error finding container a1bb232f89d9b9df5f76bce3bdd643840a602da7e38332b35c7c23a937312ef1: Status 404 returned error can't find the container with id a1bb232f89d9b9df5f76bce3bdd643840a602da7e38332b35c7c23a937312ef1
Oct 13 13:21:37 crc kubenswrapper[4684]: I1013 13:21:37.678257 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 13 13:21:37 crc kubenswrapper[4684]: I1013 13:21:37.714074 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"73377942-2512-4398-b6ca-25aa9a591619","Type":"ContainerStarted","Data":"a1bb232f89d9b9df5f76bce3bdd643840a602da7e38332b35c7c23a937312ef1"}
Oct 13 13:21:41 crc kubenswrapper[4684]: I1013 13:21:41.629575 4684 scope.go:117] "RemoveContainer" containerID="4c02170d6cf1350d341f3139810dc15d8d4f3ce6a048d33dc6506bed33f1b958"
Oct 13 13:21:41 crc kubenswrapper[4684]: E1013 13:21:41.638302 4684 log.go:32] "RemoveContainer from runtime service failed" err="rpc error: code = Unknown desc = failed to delete container k8s_registry-server_community-operators-9mv7b_openshift-marketplace_22ed76a4-376f-43eb-89d1-995c25747c97_0 in pod sandbox a04d04f26390e47115b20381a85591c191c47558c65cf6000999a83cd1336074: identifier is not a container" containerID="199c0d9eafd7c52aefb6233ae2d8b5b9a4927283432d65f1c1d0c84e55b6d136"
Oct 13 13:21:41 crc kubenswrapper[4684]: E1013 13:21:41.639442 4684 kuberuntime_gc.go:150] "Failed to remove container" err="rpc error: code = Unknown desc = failed to delete container k8s_registry-server_community-operators-9mv7b_openshift-marketplace_22ed76a4-376f-43eb-89d1-995c25747c97_0 in pod sandbox a04d04f26390e47115b20381a85591c191c47558c65cf6000999a83cd1336074: identifier is not a container" containerID="199c0d9eafd7c52aefb6233ae2d8b5b9a4927283432d65f1c1d0c84e55b6d136"
Oct 13 13:21:41 crc kubenswrapper[4684]: I1013 13:21:41.639500 4684 scope.go:117] "RemoveContainer" containerID="2981cce05fc85724a8731fa460e9c546a29edcbe76846a653a8c8e38badd5797"
Oct 13 13:21:41 crc kubenswrapper[4684]: I1013 13:21:41.758566 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d9101813-710f-4c10-8510-415d62289ab1","Type":"ContainerStarted","Data":"2be8e76941bd920cc17ffd973700fc9934bd7bf067fe791c1bd2c3bda4afef22"}
Oct 13 13:21:42 crc kubenswrapper[4684]: I1013 13:21:42.098719 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Oct 13 13:21:42 crc kubenswrapper[4684]: I1013 13:21:42.111976 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Oct 13 13:21:42 crc kubenswrapper[4684]: W1013 13:21:42.530992 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab39473e_47b6_4570_b1f6_f81ee811c19f.slice/crio-489166fba02cfceac1dc0d8a4a70ff5f71bbb31ebb4054096e66bb35669bc8d4 WatchSource:0}: Error finding container 489166fba02cfceac1dc0d8a4a70ff5f71bbb31ebb4054096e66bb35669bc8d4: Status 404 returned error can't find the container with id 489166fba02cfceac1dc0d8a4a70ff5f71bbb31ebb4054096e66bb35669bc8d4
Oct 13 13:21:42 crc kubenswrapper[4684]: I1013 13:21:42.625157 4684 scope.go:117] "RemoveContainer" containerID="4c02170d6cf1350d341f3139810dc15d8d4f3ce6a048d33dc6506bed33f1b958"
Oct 13 13:21:42 crc kubenswrapper[4684]: E1013 13:21:42.638639 4684 log.go:32] "RemoveContainer from runtime service failed" err="rpc error: code = Unknown desc = failed to delete container k8s_extract-content_community-operators-9mv7b_openshift-marketplace_22ed76a4-376f-43eb-89d1-995c25747c97_0 in pod sandbox a04d04f26390e47115b20381a85591c191c47558c65cf6000999a83cd1336074 from index: no such id: '4c02170d6cf1350d341f3139810dc15d8d4f3ce6a048d33dc6506bed33f1b958'" containerID="4c02170d6cf1350d341f3139810dc15d8d4f3ce6a048d33dc6506bed33f1b958"
Oct 13 13:21:42 crc kubenswrapper[4684]: I1013 13:21:42.638691 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c02170d6cf1350d341f3139810dc15d8d4f3ce6a048d33dc6506bed33f1b958"} err="rpc error: code = Unknown desc = failed to delete container k8s_extract-content_community-operators-9mv7b_openshift-marketplace_22ed76a4-376f-43eb-89d1-995c25747c97_0 in pod sandbox a04d04f26390e47115b20381a85591c191c47558c65cf6000999a83cd1336074 from index: no such id: '4c02170d6cf1350d341f3139810dc15d8d4f3ce6a048d33dc6506bed33f1b958'"
Oct 13 13:21:42 crc kubenswrapper[4684]: I1013 13:21:42.638728 4684 scope.go:117] "RemoveContainer" containerID="2981cce05fc85724a8731fa460e9c546a29edcbe76846a653a8c8e38badd5797"
Oct 13 13:21:42 crc kubenswrapper[4684]: E1013 13:21:42.639402 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2981cce05fc85724a8731fa460e9c546a29edcbe76846a653a8c8e38badd5797\": container with ID starting with 2981cce05fc85724a8731fa460e9c546a29edcbe76846a653a8c8e38badd5797 not found: ID does not exist" containerID="2981cce05fc85724a8731fa460e9c546a29edcbe76846a653a8c8e38badd5797"
Oct 13 13:21:42 crc kubenswrapper[4684]: I1013 13:21:42.639464 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2981cce05fc85724a8731fa460e9c546a29edcbe76846a653a8c8e38badd5797"} err="failed to get container status \"2981cce05fc85724a8731fa460e9c546a29edcbe76846a653a8c8e38badd5797\": rpc error: code = NotFound desc = could not find container \"2981cce05fc85724a8731fa460e9c546a29edcbe76846a653a8c8e38badd5797\": container with ID starting with 2981cce05fc85724a8731fa460e9c546a29edcbe76846a653a8c8e38badd5797 not found: ID does not exist"
Oct 13 13:21:42 crc kubenswrapper[4684]: I1013 13:21:42.774319 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4d4341d2-642e-4c4c-b517-edb89e87d1f0","Type":"ContainerStarted","Data":"b4533b57ae2e31761e7820ae078d5456c3e04985d4542fada7481b224663fe14"}
Oct 13 13:21:42 crc kubenswrapper[4684]: I1013 13:21:42.776489 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"ab39473e-47b6-4570-b1f6-f81ee811c19f","Type":"ContainerStarted","Data":"489166fba02cfceac1dc0d8a4a70ff5f71bbb31ebb4054096e66bb35669bc8d4"}
Oct 13 13:21:43 crc kubenswrapper[4684]: I1013 13:21:43.086743 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Oct 13 13:21:43 crc kubenswrapper[4684]: I1013 13:21:43.107933 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jgthj"]
Oct 13 13:21:43 crc kubenswrapper[4684]: I1013 13:21:43.182034 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Oct 13 13:21:43 crc kubenswrapper[4684]: I1013 13:21:43.398864 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-fc6hl"]
Oct 13 13:21:43 crc kubenswrapper[4684]: W1013 13:21:43.676190 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40f06c76_a8c8_4f1c_a8b4_49fe81d3912e.slice/crio-4839afbf90665a02f878ee735599edd8a1b71d06996ced0805ac899c20abdbe6 WatchSource:0}: Error finding container 4839afbf90665a02f878ee735599edd8a1b71d06996ced0805ac899c20abdbe6: Status 404 returned error can't find the container with id 4839afbf90665a02f878ee735599edd8a1b71d06996ced0805ac899c20abdbe6
Oct 13 13:21:43 crc kubenswrapper[4684]: W1013 13:21:43.679856 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b902966_f950_4166_b594_afacd52e5346.slice/crio-f73cb55cc8a00065ee1e3b30dfebe0f7836bfd1a3ff402db66580940d7e1d0e2 WatchSource:0}: Error finding container f73cb55cc8a00065ee1e3b30dfebe0f7836bfd1a3ff402db66580940d7e1d0e2: Status 404 returned error can't find the container with id f73cb55cc8a00065ee1e3b30dfebe0f7836bfd1a3ff402db66580940d7e1d0e2
Oct 13 13:21:43 crc kubenswrapper[4684]: I1013 13:21:43.803561 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fc6hl" event={"ID":"2b902966-f950-4166-b594-afacd52e5346","Type":"ContainerStarted","Data":"f73cb55cc8a00065ee1e3b30dfebe0f7836bfd1a3ff402db66580940d7e1d0e2"}
Oct 13 13:21:43 crc kubenswrapper[4684]: I1013 13:21:43.805359 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jgthj" event={"ID":"a984144e-7322-4045-a696-7ec4b746e061","Type":"ContainerStarted","Data":"0248e5908d623378efff9211210098c058cadef7e7953af5cdd1ba81fd68fde0"}
Oct 13 13:21:43 crc kubenswrapper[4684]: I1013 13:21:43.808306 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b199f677-58e1-4a16-b904-5517b06a2b5e","Type":"ContainerStarted","Data":"7080f00e638023cc53a4e8b4b398ce66969ea0038687d121fcc14d3d65c437a3"}
Oct 13 13:21:43 crc kubenswrapper[4684]: I1013 13:21:43.810340 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e","Type":"ContainerStarted","Data":"4839afbf90665a02f878ee735599edd8a1b71d06996ced0805ac899c20abdbe6"}
Oct 13 13:21:44 crc kubenswrapper[4684]: I1013 13:21:44.818979 4684 generic.go:334] "Generic (PLEG): container finished" podID="6c677329-7994-4fb3-9ecb-777589a3870c" containerID="c6baf0926005364cffaea1468a07afb58f8c94b9f528b953d1f89dbf14051650" exitCode=0
Oct 13 13:21:44 crc kubenswrapper[4684]: I1013 13:21:44.819071 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb" event={"ID":"6c677329-7994-4fb3-9ecb-777589a3870c","Type":"ContainerDied","Data":"c6baf0926005364cffaea1468a07afb58f8c94b9f528b953d1f89dbf14051650"}
Oct 13 13:21:44 crc kubenswrapper[4684]: I1013 13:21:44.822029 4684 generic.go:334] "Generic (PLEG): container finished" podID="294b7604-06b5-46f9-84fb-5e9ffd82f705" containerID="0675dee86e7eccea6ff832b0b7f4fbd5f10ab6b2df8c64bca056f6ea98732780" exitCode=0
Oct 13 13:21:44 crc kubenswrapper[4684]: I1013 13:21:44.822050 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c9cdf56f7-g6gv9" event={"ID":"294b7604-06b5-46f9-84fb-5e9ffd82f705","Type":"ContainerDied","Data":"0675dee86e7eccea6ff832b0b7f4fbd5f10ab6b2df8c64bca056f6ea98732780"}
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.365706 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb"
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.378466 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c9cdf56f7-g6gv9"
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.429609 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/294b7604-06b5-46f9-84fb-5e9ffd82f705-config\") pod \"294b7604-06b5-46f9-84fb-5e9ffd82f705\" (UID: \"294b7604-06b5-46f9-84fb-5e9ffd82f705\") "
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.429691 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8452\" (UniqueName: \"kubernetes.io/projected/294b7604-06b5-46f9-84fb-5e9ffd82f705-kube-api-access-w8452\") pod \"294b7604-06b5-46f9-84fb-5e9ffd82f705\" (UID: \"294b7604-06b5-46f9-84fb-5e9ffd82f705\") "
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.430522 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82d7m\" (UniqueName: \"kubernetes.io/projected/6c677329-7994-4fb3-9ecb-777589a3870c-kube-api-access-82d7m\") pod \"6c677329-7994-4fb3-9ecb-777589a3870c\" (UID: \"6c677329-7994-4fb3-9ecb-777589a3870c\") "
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.430633 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c677329-7994-4fb3-9ecb-777589a3870c-dns-svc\") pod \"6c677329-7994-4fb3-9ecb-777589a3870c\" (UID: \"6c677329-7994-4fb3-9ecb-777589a3870c\") "
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.430688 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c677329-7994-4fb3-9ecb-777589a3870c-config\") pod \"6c677329-7994-4fb3-9ecb-777589a3870c\" (UID: \"6c677329-7994-4fb3-9ecb-777589a3870c\") "
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.436996 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/294b7604-06b5-46f9-84fb-5e9ffd82f705-kube-api-access-w8452" (OuterVolumeSpecName: "kube-api-access-w8452") pod "294b7604-06b5-46f9-84fb-5e9ffd82f705" (UID: "294b7604-06b5-46f9-84fb-5e9ffd82f705"). InnerVolumeSpecName "kube-api-access-w8452". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.437129 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c677329-7994-4fb3-9ecb-777589a3870c-kube-api-access-82d7m" (OuterVolumeSpecName: "kube-api-access-82d7m") pod "6c677329-7994-4fb3-9ecb-777589a3870c" (UID: "6c677329-7994-4fb3-9ecb-777589a3870c"). InnerVolumeSpecName "kube-api-access-82d7m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.455191 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c677329-7994-4fb3-9ecb-777589a3870c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6c677329-7994-4fb3-9ecb-777589a3870c" (UID: "6c677329-7994-4fb3-9ecb-777589a3870c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.461139 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/294b7604-06b5-46f9-84fb-5e9ffd82f705-config" (OuterVolumeSpecName: "config") pod "294b7604-06b5-46f9-84fb-5e9ffd82f705" (UID: "294b7604-06b5-46f9-84fb-5e9ffd82f705"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.471237 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c677329-7994-4fb3-9ecb-777589a3870c-config" (OuterVolumeSpecName: "config") pod "6c677329-7994-4fb3-9ecb-777589a3870c" (UID: "6c677329-7994-4fb3-9ecb-777589a3870c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.533102 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/294b7604-06b5-46f9-84fb-5e9ffd82f705-config\") on node \"crc\" DevicePath \"\""
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.533154 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8452\" (UniqueName: \"kubernetes.io/projected/294b7604-06b5-46f9-84fb-5e9ffd82f705-kube-api-access-w8452\") on node \"crc\" DevicePath \"\""
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.533173 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82d7m\" (UniqueName: \"kubernetes.io/projected/6c677329-7994-4fb3-9ecb-777589a3870c-kube-api-access-82d7m\") on node \"crc\" DevicePath \"\""
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.533183 4684 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c677329-7994-4fb3-9ecb-777589a3870c-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.533193 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c677329-7994-4fb3-9ecb-777589a3870c-config\") on node \"crc\" DevicePath \"\""
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.832716 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c9cdf56f7-g6gv9" event={"ID":"294b7604-06b5-46f9-84fb-5e9ffd82f705","Type":"ContainerDied","Data":"b57e8fb03e18be837fd89f964134ebece16795354467895a9d4377a47dca3b3d"}
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.832760 4684 scope.go:117] "RemoveContainer" containerID="0675dee86e7eccea6ff832b0b7f4fbd5f10ab6b2df8c64bca056f6ea98732780"
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.832854 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c9cdf56f7-g6gv9"
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.840966 4684 generic.go:334] "Generic (PLEG): container finished" podID="38c94262-1b0c-4b65-939b-f3ea4171513f" containerID="71230176b583517ccaa26a5ca56c01ae9cba8d88119ddefb75a1b794d2c9961c" exitCode=0
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.841029 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" event={"ID":"38c94262-1b0c-4b65-939b-f3ea4171513f","Type":"ContainerDied","Data":"71230176b583517ccaa26a5ca56c01ae9cba8d88119ddefb75a1b794d2c9961c"}
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.843868 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1b29378a-0de1-402e-993a-a83cc3d41b67","Type":"ContainerStarted","Data":"3ca8fe97998abbb2c1cd3dfd410dc6cdf2f4f2abce4f4ba7eba8b0f3958fde16"}
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.846443 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb"
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.846559 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7975cd48fc-fzzwb" event={"ID":"6c677329-7994-4fb3-9ecb-777589a3870c","Type":"ContainerDied","Data":"f5969d96f549d00fe98e90168f3b080bc5016eb122b5dd99a3f509fbf306f199"}
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.855622 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"73377942-2512-4398-b6ca-25aa9a591619","Type":"ContainerStarted","Data":"77418b586c0f2000e8fa3552e256558138d2c9160349bca7a195e28a1bb6a9da"}
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.856154 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0"
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.858851 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c869998b-76f5-409d-9ff4-4abe3f7c9289","Type":"ContainerStarted","Data":"1c5c4ba96b7172f7b427be234ebb1240b723c34afd1406b81ab69e47670a3d5b"}
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.916600 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=13.307274399 podStartE2EDuration="19.916575932s" podCreationTimestamp="2025-10-13 13:21:26 +0000 UTC" firstStartedPulling="2025-10-13 13:21:37.26187371 +0000 UTC m=+851.829257780" lastFinishedPulling="2025-10-13 13:21:43.871175243 +0000 UTC m=+858.438559313" observedRunningTime="2025-10-13 13:21:45.915222768 +0000 UTC m=+860.482606848" watchObservedRunningTime="2025-10-13 13:21:45.916575932 +0000 UTC m=+860.483960002"
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.955207 4684 scope.go:117] "RemoveContainer" containerID="c6baf0926005364cffaea1468a07afb58f8c94b9f528b953d1f89dbf14051650"
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.956278 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c9cdf56f7-g6gv9"]
Oct 13 13:21:45 crc kubenswrapper[4684]: I1013 13:21:45.966836 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-c9cdf56f7-g6gv9"]
Oct 13 13:21:46 crc kubenswrapper[4684]: I1013 13:21:46.039169 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7975cd48fc-fzzwb"]
Oct 13 13:21:46 crc kubenswrapper[4684]: I1013 13:21:46.056973 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7975cd48fc-fzzwb"]
Oct 13 13:21:46 crc kubenswrapper[4684]: I1013 13:21:46.368228 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="294b7604-06b5-46f9-84fb-5e9ffd82f705" path="/var/lib/kubelet/pods/294b7604-06b5-46f9-84fb-5e9ffd82f705/volumes"
Oct 13 13:21:46 crc kubenswrapper[4684]: I1013 13:21:46.369308 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c677329-7994-4fb3-9ecb-777589a3870c" path="/var/lib/kubelet/pods/6c677329-7994-4fb3-9ecb-777589a3870c/volumes"
Oct 13 13:21:46 crc kubenswrapper[4684]: I1013 13:21:46.867155 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65966b547c-f2sgb" event={"ID":"3266d343-4fda-44d5-8e12-f44de29d6600","Type":"ContainerDied","Data":"8921f7b25bc586897b044d0de7904dd6173a92229d6e3e6d97b5574f3e80321d"}
Oct 13 13:21:46 crc kubenswrapper[4684]: I1013 13:21:46.867818 4684 generic.go:334] "Generic (PLEG): container finished" podID="3266d343-4fda-44d5-8e12-f44de29d6600" containerID="8921f7b25bc586897b044d0de7904dd6173a92229d6e3e6d97b5574f3e80321d" exitCode=0
Oct 13 13:21:46 crc kubenswrapper[4684]: I1013 13:21:46.869368 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d9101813-710f-4c10-8510-415d62289ab1","Type":"ContainerStarted","Data":"4dd32e22b5c837aa9ecbd6667eaad24d99321d718fe1083a632580350df1026b"}
Oct 13 13:21:46 crc kubenswrapper[4684]: I1013 13:21:46.869542 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Oct 13 13:21:46 crc kubenswrapper[4684]: I1013 13:21:46.876154 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" event={"ID":"38c94262-1b0c-4b65-939b-f3ea4171513f","Type":"ContainerStarted","Data":"e35b4c3e55ea4c27fdb1345b1625da3eb260f3f4ad7b2ec201f802f513f38e14"}
Oct 13 13:21:46 crc kubenswrapper[4684]: I1013 13:21:46.876194 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4"
Oct 13 13:21:46 crc kubenswrapper[4684]: I1013 13:21:46.938844 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=14.574062532 podStartE2EDuration="18.938817816s" podCreationTimestamp="2025-10-13 13:21:28 +0000 UTC" firstStartedPulling="2025-10-13 13:21:41.662403727 +0000 UTC m=+856.229787797" lastFinishedPulling="2025-10-13 13:21:46.027159011 +0000 UTC m=+860.594543081" observedRunningTime="2025-10-13 13:21:46.916440478 +0000 UTC m=+861.483824558" watchObservedRunningTime="2025-10-13 13:21:46.938817816 +0000 UTC m=+861.506201896"
Oct 13 13:21:46 crc kubenswrapper[4684]: I1013 13:21:46.946415 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" podStartSLOduration=4.612560545 podStartE2EDuration="24.946396493s" podCreationTimestamp="2025-10-13 13:21:22 +0000 UTC" firstStartedPulling="2025-10-13 13:21:23.537665905 +0000 UTC m=+838.105049975" lastFinishedPulling="2025-10-13 13:21:43.871501853 +0000 UTC m=+858.438885923" observedRunningTime="2025-10-13 13:21:46.940085678 +0000 UTC m=+861.507469748" watchObservedRunningTime="2025-10-13 13:21:46.946396493 +0000 UTC m=+861.513780573"
Oct 13 13:21:50 crc kubenswrapper[4684]: I1013 13:21:50.915068 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jgthj" event={"ID":"a984144e-7322-4045-a696-7ec4b746e061","Type":"ContainerStarted","Data":"ec2aa408866fa7d3d8a0cd1d484c29102bc02a8c6f4b7cbc73fc5462830d95f1"}
Oct 13 13:21:50 crc kubenswrapper[4684]: I1013 13:21:50.915966 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-jgthj"
Oct 13 13:21:50 crc kubenswrapper[4684]: I1013 13:21:50.918063 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"ab39473e-47b6-4570-b1f6-f81ee811c19f","Type":"ContainerStarted","Data":"80785aa71d82bd8b796b63d424ceffaf4e22d7a77f4ea085a9a18ac2100ff1b6"}
Oct 13 13:21:50 crc kubenswrapper[4684]: I1013 13:21:50.919980 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b199f677-58e1-4a16-b904-5517b06a2b5e","Type":"ContainerStarted","Data":"df9b1192d37318153c80b27830f139896f7557f2842a73779c862613ed5bcae1"}
Oct 13 13:21:50 crc kubenswrapper[4684]: I1013 13:21:50.922025 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e","Type":"ContainerStarted","Data":"2631c9c56e21f5fb4768a974fedad83ac47333a6edbed91d6a3ff5cd2d60dda4"}
Oct 13 13:21:50 crc kubenswrapper[4684]: I1013 13:21:50.924712 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65966b547c-f2sgb" event={"ID":"3266d343-4fda-44d5-8e12-f44de29d6600","Type":"ContainerStarted","Data":"94670d6d21b203060ca42ab3e01aaa5b9faa1e8951603c15ea78b905e7c20417"}
Oct 13 13:21:50 crc kubenswrapper[4684]: I1013 13:21:50.924926 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-65966b547c-f2sgb"
Oct 13 13:21:50 crc kubenswrapper[4684]: I1013 13:21:50.927202 4684 generic.go:334] "Generic (PLEG): container finished" podID="2b902966-f950-4166-b594-afacd52e5346" containerID="f5ac95d851cb8c446b4f41e452c35ba88ef1fa3df11ea44a738aca3991049853" exitCode=0
Oct 13 13:21:50 crc kubenswrapper[4684]: I1013 13:21:50.927246 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fc6hl" event={"ID":"2b902966-f950-4166-b594-afacd52e5346","Type":"ContainerDied","Data":"f5ac95d851cb8c446b4f41e452c35ba88ef1fa3df11ea44a738aca3991049853"}
Oct 13 13:21:50 crc kubenswrapper[4684]: I1013 13:21:50.930456 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4d4341d2-642e-4c4c-b517-edb89e87d1f0","Type":"ContainerStarted","Data":"5a8ccee803a071c337658b465d089cebfd5e4ac3e122d10ca5c074f7001fd767"}
Oct 13 13:21:50 crc kubenswrapper[4684]: I1013 13:21:50.939588 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-jgthj" podStartSLOduration=11.499799638 podStartE2EDuration="17.939570475s" podCreationTimestamp="2025-10-13 13:21:33 +0000 UTC" firstStartedPulling="2025-10-13 13:21:43.674558104 +0000 UTC m=+858.241942174" lastFinishedPulling="2025-10-13 13:21:50.114328931 +0000 UTC m=+864.681713011" observedRunningTime="2025-10-13 13:21:50.937081954 +0000 UTC m=+865.504466064" watchObservedRunningTime="2025-10-13 13:21:50.939570475 +0000 UTC m=+865.506954555"
Oct 13 13:21:51 crc kubenswrapper[4684]: I1013 13:21:51.037982 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-65966b547c-f2sgb" podStartSLOduration=8.459016912 podStartE2EDuration="29.037960147s" podCreationTimestamp="2025-10-13 13:21:22 +0000 UTC" firstStartedPulling="2025-10-13
13:21:23.290761379 +0000 UTC m=+837.858145449" lastFinishedPulling="2025-10-13 13:21:43.869704624 +0000 UTC m=+858.437088684" observedRunningTime="2025-10-13 13:21:51.03622245 +0000 UTC m=+865.603606540" watchObservedRunningTime="2025-10-13 13:21:51.037960147 +0000 UTC m=+865.605344237" Oct 13 13:21:51 crc kubenswrapper[4684]: I1013 13:21:51.941263 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fc6hl" event={"ID":"2b902966-f950-4166-b594-afacd52e5346","Type":"ContainerStarted","Data":"7b5ba95bd55dff5c555b3f32d376c4dbe5751e387825bd1403be377931c02b0a"} Oct 13 13:21:51 crc kubenswrapper[4684]: I1013 13:21:51.941686 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fc6hl" event={"ID":"2b902966-f950-4166-b594-afacd52e5346","Type":"ContainerStarted","Data":"28cd0587414c0fb599ddf2fb7ee3709a29d8f1941fa1db09ad6a8e3db66ba48d"} Oct 13 13:21:51 crc kubenswrapper[4684]: I1013 13:21:51.967026 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-fc6hl" podStartSLOduration=12.675222258 podStartE2EDuration="18.967001928s" podCreationTimestamp="2025-10-13 13:21:33 +0000 UTC" firstStartedPulling="2025-10-13 13:21:43.686810383 +0000 UTC m=+858.254194453" lastFinishedPulling="2025-10-13 13:21:49.978590053 +0000 UTC m=+864.545974123" observedRunningTime="2025-10-13 13:21:51.960556398 +0000 UTC m=+866.527940478" watchObservedRunningTime="2025-10-13 13:21:51.967001928 +0000 UTC m=+866.534386018" Oct 13 13:21:52 crc kubenswrapper[4684]: I1013 13:21:52.153039 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 13 13:21:52 crc kubenswrapper[4684]: I1013 13:21:52.948789 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-fc6hl" Oct 13 13:21:52 crc kubenswrapper[4684]: I1013 13:21:52.948845 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-fc6hl" Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.084991 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.130582 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65966b547c-f2sgb"] Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.130815 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-65966b547c-f2sgb" podUID="3266d343-4fda-44d5-8e12-f44de29d6600" containerName="dnsmasq-dns" containerID="cri-o://94670d6d21b203060ca42ab3e01aaa5b9faa1e8951603c15ea78b905e7c20417" gracePeriod=10 Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.715335 4684 util.go:48] "No ready sandbox for pod can be found. 
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.715335 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65966b547c-f2sgb"
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.876294 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hh64v\" (UniqueName: \"kubernetes.io/projected/3266d343-4fda-44d5-8e12-f44de29d6600-kube-api-access-hh64v\") pod \"3266d343-4fda-44d5-8e12-f44de29d6600\" (UID: \"3266d343-4fda-44d5-8e12-f44de29d6600\") "
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.876761 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3266d343-4fda-44d5-8e12-f44de29d6600-config\") pod \"3266d343-4fda-44d5-8e12-f44de29d6600\" (UID: \"3266d343-4fda-44d5-8e12-f44de29d6600\") "
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.876809 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3266d343-4fda-44d5-8e12-f44de29d6600-dns-svc\") pod \"3266d343-4fda-44d5-8e12-f44de29d6600\" (UID: \"3266d343-4fda-44d5-8e12-f44de29d6600\") "
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.890119 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3266d343-4fda-44d5-8e12-f44de29d6600-kube-api-access-hh64v" (OuterVolumeSpecName: "kube-api-access-hh64v") pod "3266d343-4fda-44d5-8e12-f44de29d6600" (UID: "3266d343-4fda-44d5-8e12-f44de29d6600"). InnerVolumeSpecName "kube-api-access-hh64v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.911534 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3266d343-4fda-44d5-8e12-f44de29d6600-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3266d343-4fda-44d5-8e12-f44de29d6600" (UID: "3266d343-4fda-44d5-8e12-f44de29d6600"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.914166 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3266d343-4fda-44d5-8e12-f44de29d6600-config" (OuterVolumeSpecName: "config") pod "3266d343-4fda-44d5-8e12-f44de29d6600" (UID: "3266d343-4fda-44d5-8e12-f44de29d6600"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.973622 4684 generic.go:334] "Generic (PLEG): container finished" podID="ab39473e-47b6-4570-b1f6-f81ee811c19f" containerID="80785aa71d82bd8b796b63d424ceffaf4e22d7a77f4ea085a9a18ac2100ff1b6" exitCode=0
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.973693 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"ab39473e-47b6-4570-b1f6-f81ee811c19f","Type":"ContainerDied","Data":"80785aa71d82bd8b796b63d424ceffaf4e22d7a77f4ea085a9a18ac2100ff1b6"}
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.977299 4684 generic.go:334] "Generic (PLEG): container finished" podID="3266d343-4fda-44d5-8e12-f44de29d6600" containerID="94670d6d21b203060ca42ab3e01aaa5b9faa1e8951603c15ea78b905e7c20417" exitCode=0
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.977406 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65966b547c-f2sgb"
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.977412 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65966b547c-f2sgb" event={"ID":"3266d343-4fda-44d5-8e12-f44de29d6600","Type":"ContainerDied","Data":"94670d6d21b203060ca42ab3e01aaa5b9faa1e8951603c15ea78b905e7c20417"}
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.977543 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65966b547c-f2sgb" event={"ID":"3266d343-4fda-44d5-8e12-f44de29d6600","Type":"ContainerDied","Data":"f6a60e5df06a74e0e4bc09ff262f4389d110eb17633642d11f09d41b149ece24"}
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.977569 4684 scope.go:117] "RemoveContainer" containerID="94670d6d21b203060ca42ab3e01aaa5b9faa1e8951603c15ea78b905e7c20417"
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.980250 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3266d343-4fda-44d5-8e12-f44de29d6600-config\") on node \"crc\" DevicePath \"\""
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.980275 4684 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3266d343-4fda-44d5-8e12-f44de29d6600-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.980286 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hh64v\" (UniqueName: \"kubernetes.io/projected/3266d343-4fda-44d5-8e12-f44de29d6600-kube-api-access-hh64v\") on node \"crc\" DevicePath \"\""
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.981528 4684 generic.go:334] "Generic (PLEG): container finished" podID="4d4341d2-642e-4c4c-b517-edb89e87d1f0" containerID="5a8ccee803a071c337658b465d089cebfd5e4ac3e122d10ca5c074f7001fd767" exitCode=0
Oct 13 13:21:53 crc kubenswrapper[4684]: I1013 13:21:53.981570 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4d4341d2-642e-4c4c-b517-edb89e87d1f0","Type":"ContainerDied","Data":"5a8ccee803a071c337658b465d089cebfd5e4ac3e122d10ca5c074f7001fd767"}
Oct 13 13:21:54 crc kubenswrapper[4684]: I1013 13:21:54.004204 4684 scope.go:117] "RemoveContainer" containerID="8921f7b25bc586897b044d0de7904dd6173a92229d6e3e6d97b5574f3e80321d"
Oct 13 13:21:54 crc kubenswrapper[4684]: I1013 13:21:54.040717 4684 scope.go:117] "RemoveContainer" containerID="94670d6d21b203060ca42ab3e01aaa5b9faa1e8951603c15ea78b905e7c20417"
Oct 13 13:21:54 crc kubenswrapper[4684]: E1013 13:21:54.051136 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94670d6d21b203060ca42ab3e01aaa5b9faa1e8951603c15ea78b905e7c20417\": container with ID starting with 94670d6d21b203060ca42ab3e01aaa5b9faa1e8951603c15ea78b905e7c20417 not found: ID does not exist" containerID="94670d6d21b203060ca42ab3e01aaa5b9faa1e8951603c15ea78b905e7c20417"
Oct 13 13:21:54 crc kubenswrapper[4684]: I1013 13:21:54.051288 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94670d6d21b203060ca42ab3e01aaa5b9faa1e8951603c15ea78b905e7c20417"} err="failed to get container status \"94670d6d21b203060ca42ab3e01aaa5b9faa1e8951603c15ea78b905e7c20417\": rpc error: code = NotFound desc = could not find container \"94670d6d21b203060ca42ab3e01aaa5b9faa1e8951603c15ea78b905e7c20417\": container with ID starting with 94670d6d21b203060ca42ab3e01aaa5b9faa1e8951603c15ea78b905e7c20417 not found: ID does not exist"
Oct 13 13:21:54 crc kubenswrapper[4684]: I1013 13:21:54.051379 4684 scope.go:117] "RemoveContainer" containerID="8921f7b25bc586897b044d0de7904dd6173a92229d6e3e6d97b5574f3e80321d"
Oct 13 13:21:54 crc kubenswrapper[4684]: E1013 13:21:54.052352 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8921f7b25bc586897b044d0de7904dd6173a92229d6e3e6d97b5574f3e80321d\": container with ID starting with 8921f7b25bc586897b044d0de7904dd6173a92229d6e3e6d97b5574f3e80321d not found: ID does not exist" containerID="8921f7b25bc586897b044d0de7904dd6173a92229d6e3e6d97b5574f3e80321d"
Oct 13 13:21:54 crc kubenswrapper[4684]: I1013 13:21:54.052405 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8921f7b25bc586897b044d0de7904dd6173a92229d6e3e6d97b5574f3e80321d"} err="failed to get container status \"8921f7b25bc586897b044d0de7904dd6173a92229d6e3e6d97b5574f3e80321d\": rpc error: code = NotFound desc = could not find container \"8921f7b25bc586897b044d0de7904dd6173a92229d6e3e6d97b5574f3e80321d\": container with ID starting with 8921f7b25bc586897b044d0de7904dd6173a92229d6e3e6d97b5574f3e80321d not found: ID does not exist"
Oct 13 13:21:54 crc kubenswrapper[4684]: I1013 13:21:54.054019 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65966b547c-f2sgb"]
Oct 13 13:21:54 crc kubenswrapper[4684]: I1013 13:21:54.058756 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-65966b547c-f2sgb"]
Oct 13 13:21:54 crc kubenswrapper[4684]: I1013 13:21:54.359365 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3266d343-4fda-44d5-8e12-f44de29d6600" path="/var/lib/kubelet/pods/3266d343-4fda-44d5-8e12-f44de29d6600/volumes"
Oct 13 13:21:54 crc kubenswrapper[4684]: I1013 13:21:54.990971 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"40f06c76-a8c8-4f1c-a8b4-49fe81d3912e","Type":"ContainerStarted","Data":"f3f486abba7f624abbe8b899d72cacd3731a854b710300b092dd0d7617bde82f"}
Oct 13 13:21:54 crc kubenswrapper[4684]: I1013 13:21:54.995091 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4d4341d2-642e-4c4c-b517-edb89e87d1f0","Type":"ContainerStarted","Data":"ba468d110efaae8353cc8e35e65059a70931bde3312d428b3a4e1fc917317977"}
Oct 13 13:21:55 crc kubenswrapper[4684]: I1013 13:21:54.999020 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"ab39473e-47b6-4570-b1f6-f81ee811c19f","Type":"ContainerStarted","Data":"da2197bebe195e5ab6743188816234a824d3f4ae0215ae230e10c709d870e374"}
Oct 13 13:21:55 crc kubenswrapper[4684]: I1013 13:21:55.005264 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b199f677-58e1-4a16-b904-5517b06a2b5e","Type":"ContainerStarted","Data":"6ccfdaa044ada97db3f6d6d5536a32403fdb2d0e7c786489153b04d27be5989d"}
Oct 13 13:21:55 crc kubenswrapper[4684]: I1013 13:21:55.013627 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=10.798687674 podStartE2EDuration="21.013571737s" podCreationTimestamp="2025-10-13 13:21:34 +0000 UTC" firstStartedPulling="2025-10-13 13:21:43.704590141 +0000 UTC m=+858.271974211" lastFinishedPulling="2025-10-13 13:21:53.919474204 +0000 UTC m=+868.486858274" observedRunningTime="2025-10-13 13:21:55.012232883 +0000 UTC m=+869.579616963" watchObservedRunningTime="2025-10-13 13:21:55.013571737 +0000 UTC m=+869.580955807"
Oct 13 13:21:55 crc kubenswrapper[4684]: I1013 13:21:55.043061 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=23.465820995 podStartE2EDuration="31.043036965s" podCreationTimestamp="2025-10-13 13:21:24 +0000 UTC" firstStartedPulling="2025-10-13 13:21:42.534225537 +0000 UTC m=+857.101609607" lastFinishedPulling="2025-10-13 13:21:50.111441507 +0000 UTC m=+864.678825577" observedRunningTime="2025-10-13 13:21:55.039767648 +0000 UTC m=+869.607151718" watchObservedRunningTime="2025-10-13 13:21:55.043036965 +0000 UTC m=+869.610421035"
Oct 13 13:21:55 crc kubenswrapper[4684]: I1013 13:21:55.065236 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=22.620486029 podStartE2EDuration="30.065217487s" podCreationTimestamp="2025-10-13 13:21:25 +0000 UTC" firstStartedPulling="2025-10-13 13:21:42.533877766 +0000 UTC m=+857.101261836" lastFinishedPulling="2025-10-13 13:21:49.978609224 +0000 UTC m=+864.545993294" observedRunningTime="2025-10-13 13:21:55.060184263 +0000 UTC m=+869.627568343" watchObservedRunningTime="2025-10-13 13:21:55.065217487 +0000 UTC m=+869.632601557"
Oct 13 13:21:55 crc kubenswrapper[4684]: I1013 13:21:55.094893 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=10.852756045 podStartE2EDuration="21.094869742s" podCreationTimestamp="2025-10-13 13:21:34 +0000 UTC" firstStartedPulling="2025-10-13 13:21:43.669588803 +0000 UTC m=+858.236972893" lastFinishedPulling="2025-10-13 13:21:53.91170253 +0000 UTC m=+868.479086590" observedRunningTime="2025-10-13 13:21:55.086117657 +0000 UTC m=+869.653501757" watchObservedRunningTime="2025-10-13 13:21:55.094869742 +0000 UTC m=+869.662253812"
Oct 13 13:21:55 crc kubenswrapper[4684]: I1013 13:21:55.947042 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Oct 13 13:21:55 crc kubenswrapper[4684]: I1013 13:21:55.947371 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Oct 13 13:21:56 crc kubenswrapper[4684]: I1013 13:21:56.035816 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:56 crc kubenswrapper[4684]: I1013 13:21:56.219970 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:57 crc kubenswrapper[4684]: I1013 13:21:57.035099 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:57 crc kubenswrapper[4684]: I1013 13:21:57.060326 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Oct 13 13:21:57 crc kubenswrapper[4684]: I1013 13:21:57.060413 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Oct 13 13:21:57 crc kubenswrapper[4684]: I1013 13:21:57.071633 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:57 crc kubenswrapper[4684]: I1013 13:21:57.220523 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:57 crc kubenswrapper[4684]: I1013 13:21:57.255705 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.058873 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.060012 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.378144 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6655c7dd6c-fhfvg"]
Oct 13 13:21:58 crc kubenswrapper[4684]: E1013 13:21:58.379682 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c677329-7994-4fb3-9ecb-777589a3870c" containerName="init"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.379703 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c677329-7994-4fb3-9ecb-777589a3870c" containerName="init"
Oct 13 13:21:58 crc kubenswrapper[4684]: E1013 13:21:58.379740 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="294b7604-06b5-46f9-84fb-5e9ffd82f705" containerName="init"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.379750 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="294b7604-06b5-46f9-84fb-5e9ffd82f705" containerName="init"
Oct 13 13:21:58 crc kubenswrapper[4684]: E1013 13:21:58.379761 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3266d343-4fda-44d5-8e12-f44de29d6600" containerName="init"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.379770 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="3266d343-4fda-44d5-8e12-f44de29d6600" containerName="init"
Oct 13 13:21:58 crc kubenswrapper[4684]: E1013 13:21:58.379788 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3266d343-4fda-44d5-8e12-f44de29d6600" containerName="dnsmasq-dns"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.379796 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="3266d343-4fda-44d5-8e12-f44de29d6600" containerName="dnsmasq-dns"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.380065 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="294b7604-06b5-46f9-84fb-5e9ffd82f705" containerName="init"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.380089 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="3266d343-4fda-44d5-8e12-f44de29d6600" containerName="dnsmasq-dns"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.380107 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c677329-7994-4fb3-9ecb-777589a3870c" containerName="init"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.384890 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.390868 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.393078 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6655c7dd6c-fhfvg"]
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.400969 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-fwm22"]
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.402089 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.406928 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-fwm22"]
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.412125 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.458589 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846b5496-bd3a-41b4-aef4-546dd2e85a83-combined-ca-bundle\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.458700 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frzk7\" (UniqueName: \"kubernetes.io/projected/846b5496-bd3a-41b4-aef4-546dd2e85a83-kube-api-access-frzk7\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.458756 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/846b5496-bd3a-41b4-aef4-546dd2e85a83-config\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.458883 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzln8\" (UniqueName: \"kubernetes.io/projected/aa772389-90ec-4a57-ab32-b3e6dc769f21-kube-api-access-lzln8\") pod \"dnsmasq-dns-6655c7dd6c-fhfvg\" (UID: \"aa772389-90ec-4a57-ab32-b3e6dc769f21\") " pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.458960 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-dns-svc\") pod \"dnsmasq-dns-6655c7dd6c-fhfvg\" (UID: \"aa772389-90ec-4a57-ab32-b3e6dc769f21\") " pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.458994 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-ovsdbserver-nb\") pod \"dnsmasq-dns-6655c7dd6c-fhfvg\" (UID: \"aa772389-90ec-4a57-ab32-b3e6dc769f21\") " pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.459100 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-config\") pod \"dnsmasq-dns-6655c7dd6c-fhfvg\" (UID: \"aa772389-90ec-4a57-ab32-b3e6dc769f21\") " pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.459150 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/846b5496-bd3a-41b4-aef4-546dd2e85a83-ovn-rundir\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.459212 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/846b5496-bd3a-41b4-aef4-546dd2e85a83-ovs-rundir\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.459366 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/846b5496-bd3a-41b4-aef4-546dd2e85a83-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.487760 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6655c7dd6c-fhfvg"]
Oct 13 13:21:58 crc kubenswrapper[4684]: E1013 13:21:58.490331 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-lzln8 ovsdbserver-nb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg" podUID="aa772389-90ec-4a57-ab32-b3e6dc769f21"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.515441 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6ff587cdc-kdsbk"]
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.516974 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.520062 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.528623 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6ff587cdc-kdsbk"]
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.559566 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.560603 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-dns-svc\") pod \"dnsmasq-dns-6655c7dd6c-fhfvg\" (UID: \"aa772389-90ec-4a57-ab32-b3e6dc769f21\") " pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.560650 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-ovsdbserver-nb\") pod \"dnsmasq-dns-6655c7dd6c-fhfvg\" (UID: \"aa772389-90ec-4a57-ab32-b3e6dc769f21\") " pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.560695 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-config\") pod \"dnsmasq-dns-6655c7dd6c-fhfvg\" (UID: \"aa772389-90ec-4a57-ab32-b3e6dc769f21\") " pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.560723 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/846b5496-bd3a-41b4-aef4-546dd2e85a83-ovn-rundir\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.560752 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-dns-svc\") pod \"dnsmasq-dns-6ff587cdc-kdsbk\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.560782 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/846b5496-bd3a-41b4-aef4-546dd2e85a83-ovs-rundir\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.560810 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/846b5496-bd3a-41b4-aef4-546dd2e85a83-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.560834 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846b5496-bd3a-41b4-aef4-546dd2e85a83-combined-ca-bundle\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.560863 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-config\") pod \"dnsmasq-dns-6ff587cdc-kdsbk\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.560925 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frzk7\" (UniqueName: \"kubernetes.io/projected/846b5496-bd3a-41b4-aef4-546dd2e85a83-kube-api-access-frzk7\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.560950 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-ovsdbserver-nb\") pod \"dnsmasq-dns-6ff587cdc-kdsbk\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.560970 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpbcq\" (UniqueName: \"kubernetes.io/projected/af439788-a28d-45c9-b195-b614c82d5f7e-kube-api-access-qpbcq\") pod \"dnsmasq-dns-6ff587cdc-kdsbk\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.561012 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/846b5496-bd3a-41b4-aef4-546dd2e85a83-config\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.561051 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-ovsdbserver-sb\") pod \"dnsmasq-dns-6ff587cdc-kdsbk\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.561087 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzln8\" (UniqueName: \"kubernetes.io/projected/aa772389-90ec-4a57-ab32-b3e6dc769f21-kube-api-access-lzln8\") pod \"dnsmasq-dns-6655c7dd6c-fhfvg\" (UID: \"aa772389-90ec-4a57-ab32-b3e6dc769f21\") " pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.561359 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.561528 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-dns-svc\") pod \"dnsmasq-dns-6655c7dd6c-fhfvg\" (UID: \"aa772389-90ec-4a57-ab32-b3e6dc769f21\") " pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.562615 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/846b5496-bd3a-41b4-aef4-546dd2e85a83-ovn-rundir\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.562717 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/846b5496-bd3a-41b4-aef4-546dd2e85a83-ovs-rundir\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.565342 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-ovsdbserver-nb\") pod \"dnsmasq-dns-6655c7dd6c-fhfvg\" (UID: \"aa772389-90ec-4a57-ab32-b3e6dc769f21\") " pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.565360 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.565438 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-2pmd6"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.565565 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.565689 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.565858 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/846b5496-bd3a-41b4-aef4-546dd2e85a83-config\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.567359 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846b5496-bd3a-41b4-aef4-546dd2e85a83-combined-ca-bundle\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.569098 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-config\") pod \"dnsmasq-dns-6655c7dd6c-fhfvg\" (UID: \"aa772389-90ec-4a57-ab32-b3e6dc769f21\") " pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.576218 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/846b5496-bd3a-41b4-aef4-546dd2e85a83-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.595243 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frzk7\" (UniqueName: \"kubernetes.io/projected/846b5496-bd3a-41b4-aef4-546dd2e85a83-kube-api-access-frzk7\") pod \"ovn-controller-metrics-fwm22\" (UID: \"846b5496-bd3a-41b4-aef4-546dd2e85a83\") " pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.603272 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.609552 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzln8\" (UniqueName: \"kubernetes.io/projected/aa772389-90ec-4a57-ab32-b3e6dc769f21-kube-api-access-lzln8\") pod \"dnsmasq-dns-6655c7dd6c-fhfvg\" (UID: \"aa772389-90ec-4a57-ab32-b3e6dc769f21\") " pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.662221 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a0f64874-c6da-4e22-b3ba-29679844c1e2-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.662272 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0f64874-c6da-4e22-b3ba-29679844c1e2-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.662310 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-ovsdbserver-sb\") pod \"dnsmasq-dns-6ff587cdc-kdsbk\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.662364 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0f64874-c6da-4e22-b3ba-29679844c1e2-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.662399 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4xw5\" (UniqueName: \"kubernetes.io/projected/a0f64874-c6da-4e22-b3ba-29679844c1e2-kube-api-access-h4xw5\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.662425 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0f64874-c6da-4e22-b3ba-29679844c1e2-config\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.662460 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0f64874-c6da-4e22-b3ba-29679844c1e2-scripts\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.662479 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-dns-svc\") pod \"dnsmasq-dns-6ff587cdc-kdsbk\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.662580 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-config\") pod \"dnsmasq-dns-6ff587cdc-kdsbk\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.662622 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-ovsdbserver-nb\") pod \"dnsmasq-dns-6ff587cdc-kdsbk\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.662648 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0f64874-c6da-4e22-b3ba-29679844c1e2-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.662691 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpbcq\" (UniqueName: \"kubernetes.io/projected/af439788-a28d-45c9-b195-b614c82d5f7e-kube-api-access-qpbcq\") pod \"dnsmasq-dns-6ff587cdc-kdsbk\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.663146 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-dns-svc\") pod \"dnsmasq-dns-6ff587cdc-kdsbk\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.663150 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-ovsdbserver-sb\") pod \"dnsmasq-dns-6ff587cdc-kdsbk\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.663617 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-config\") pod \"dnsmasq-dns-6ff587cdc-kdsbk\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.664450 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-ovsdbserver-nb\") pod \"dnsmasq-dns-6ff587cdc-kdsbk\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.685941 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpbcq\" (UniqueName: \"kubernetes.io/projected/af439788-a28d-45c9-b195-b614c82d5f7e-kube-api-access-qpbcq\") pod \"dnsmasq-dns-6ff587cdc-kdsbk\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.735559 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-fwm22"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.763149 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0f64874-c6da-4e22-b3ba-29679844c1e2-scripts\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.763228 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0f64874-c6da-4e22-b3ba-29679844c1e2-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.763257 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a0f64874-c6da-4e22-b3ba-29679844c1e2-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.763276 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0f64874-c6da-4e22-b3ba-29679844c1e2-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.763308 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0f64874-c6da-4e22-b3ba-29679844c1e2-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.763329 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4xw5\" (UniqueName: \"kubernetes.io/projected/a0f64874-c6da-4e22-b3ba-29679844c1e2-kube-api-access-h4xw5\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.763346 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0f64874-c6da-4e22-b3ba-29679844c1e2-config\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.764301 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a0f64874-c6da-4e22-b3ba-29679844c1e2-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.764480 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0f64874-c6da-4e22-b3ba-29679844c1e2-scripts\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.766162 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0f64874-c6da-4e22-b3ba-29679844c1e2-config\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.766686 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0f64874-c6da-4e22-b3ba-29679844c1e2-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.766732 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0f64874-c6da-4e22-b3ba-29679844c1e2-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.771631 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0f64874-c6da-4e22-b3ba-29679844c1e2-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.780270 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4xw5\" (UniqueName: \"kubernetes.io/projected/a0f64874-c6da-4e22-b3ba-29679844c1e2-kube-api-access-h4xw5\") pod \"ovn-northd-0\" (UID: \"a0f64874-c6da-4e22-b3ba-29679844c1e2\") " pod="openstack/ovn-northd-0"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.863207 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk"
Oct 13 13:21:58 crc kubenswrapper[4684]: I1013 13:21:58.969198 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.046880 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.069128 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.129570 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.150218 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ff587cdc-kdsbk"]
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.170661 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-config\") pod \"aa772389-90ec-4a57-ab32-b3e6dc769f21\" (UID: \"aa772389-90ec-4a57-ab32-b3e6dc769f21\") "
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.170731 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzln8\" (UniqueName: \"kubernetes.io/projected/aa772389-90ec-4a57-ab32-b3e6dc769f21-kube-api-access-lzln8\") pod \"aa772389-90ec-4a57-ab32-b3e6dc769f21\" (UID: \"aa772389-90ec-4a57-ab32-b3e6dc769f21\") "
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.170842 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-dns-svc\") pod \"aa772389-90ec-4a57-ab32-b3e6dc769f21\" (UID: \"aa772389-90ec-4a57-ab32-b3e6dc769f21\") "
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.173357 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-config" (OuterVolumeSpecName: "config") pod "aa772389-90ec-4a57-ab32-b3e6dc769f21" (UID: "aa772389-90ec-4a57-ab32-b3e6dc769f21"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.176560 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "aa772389-90ec-4a57-ab32-b3e6dc769f21" (UID: "aa772389-90ec-4a57-ab32-b3e6dc769f21"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.182022 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-ovsdbserver-nb\") pod \"aa772389-90ec-4a57-ab32-b3e6dc769f21\" (UID: \"aa772389-90ec-4a57-ab32-b3e6dc769f21\") "
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.182666 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-config\") on node \"crc\" DevicePath \"\""
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.182688 4684 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.183600 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "aa772389-90ec-4a57-ab32-b3e6dc769f21" (UID: "aa772389-90ec-4a57-ab32-b3e6dc769f21"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.183752 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa772389-90ec-4a57-ab32-b3e6dc769f21-kube-api-access-lzln8" (OuterVolumeSpecName: "kube-api-access-lzln8") pod "aa772389-90ec-4a57-ab32-b3e6dc769f21" (UID: "aa772389-90ec-4a57-ab32-b3e6dc769f21"). InnerVolumeSpecName "kube-api-access-lzln8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.187499 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-747494c85c-8wvj7"]
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.189192 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-747494c85c-8wvj7"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.199102 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-fwm22"]
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.227122 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-747494c85c-8wvj7"]
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.241197 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.284091 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzln8\" (UniqueName: \"kubernetes.io/projected/aa772389-90ec-4a57-ab32-b3e6dc769f21-kube-api-access-lzln8\") on node \"crc\" DevicePath \"\""
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.284437 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aa772389-90ec-4a57-ab32-b3e6dc769f21-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.326602 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.353740 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ff587cdc-kdsbk"]
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.388525 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-config\") pod \"dnsmasq-dns-747494c85c-8wvj7\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " pod="openstack/dnsmasq-dns-747494c85c-8wvj7"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.388610 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xw2p\" (UniqueName: \"kubernetes.io/projected/0d3dc831-9857-48cd-818e-cbc797668403-kube-api-access-9xw2p\") pod \"dnsmasq-dns-747494c85c-8wvj7\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " pod="openstack/dnsmasq-dns-747494c85c-8wvj7"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.388653 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-dns-svc\") pod \"dnsmasq-dns-747494c85c-8wvj7\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " pod="openstack/dnsmasq-dns-747494c85c-8wvj7"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.388679 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-ovsdbserver-sb\") pod \"dnsmasq-dns-747494c85c-8wvj7\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " pod="openstack/dnsmasq-dns-747494c85c-8wvj7"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.388766 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-ovsdbserver-nb\") pod \"dnsmasq-dns-747494c85c-8wvj7\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " pod="openstack/dnsmasq-dns-747494c85c-8wvj7"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.490750 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-dns-svc\") pod \"dnsmasq-dns-747494c85c-8wvj7\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " pod="openstack/dnsmasq-dns-747494c85c-8wvj7"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.490796 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-ovsdbserver-sb\") pod \"dnsmasq-dns-747494c85c-8wvj7\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " pod="openstack/dnsmasq-dns-747494c85c-8wvj7"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.490879 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-ovsdbserver-nb\") pod \"dnsmasq-dns-747494c85c-8wvj7\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " pod="openstack/dnsmasq-dns-747494c85c-8wvj7"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.490969 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-config\") pod \"dnsmasq-dns-747494c85c-8wvj7\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " pod="openstack/dnsmasq-dns-747494c85c-8wvj7"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.491018 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xw2p\" (UniqueName: \"kubernetes.io/projected/0d3dc831-9857-48cd-818e-cbc797668403-kube-api-access-9xw2p\") pod \"dnsmasq-dns-747494c85c-8wvj7\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " pod="openstack/dnsmasq-dns-747494c85c-8wvj7"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.492130 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-config\") pod \"dnsmasq-dns-747494c85c-8wvj7\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " pod="openstack/dnsmasq-dns-747494c85c-8wvj7"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.493192 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-ovsdbserver-sb\") pod \"dnsmasq-dns-747494c85c-8wvj7\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " pod="openstack/dnsmasq-dns-747494c85c-8wvj7"
Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.493225 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-dns-svc\") pod \"dnsmasq-dns-747494c85c-8wvj7\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " pod="openstack/dnsmasq-dns-747494c85c-8wvj7" Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.493222 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-ovsdbserver-nb\") pod \"dnsmasq-dns-747494c85c-8wvj7\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " pod="openstack/dnsmasq-dns-747494c85c-8wvj7" Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.523837 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xw2p\" (UniqueName: \"kubernetes.io/projected/0d3dc831-9857-48cd-818e-cbc797668403-kube-api-access-9xw2p\") pod \"dnsmasq-dns-747494c85c-8wvj7\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " pod="openstack/dnsmasq-dns-747494c85c-8wvj7" Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.670324 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 13 13:21:59 crc kubenswrapper[4684]: W1013 13:21:59.699041 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0f64874_c6da_4e22_b3ba_29679844c1e2.slice/crio-e2b80a6d9cae4dd75f77a91f58b4c4307c069df811184cbfe3c6f4cb9a1824e9 WatchSource:0}: Error finding container e2b80a6d9cae4dd75f77a91f58b4c4307c069df811184cbfe3c6f4cb9a1824e9: Status 404 returned error can't find the container with id e2b80a6d9cae4dd75f77a91f58b4c4307c069df811184cbfe3c6f4cb9a1824e9 Oct 13 13:21:59 crc kubenswrapper[4684]: I1013 13:21:59.816171 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-747494c85c-8wvj7" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.056576 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-fwm22" event={"ID":"846b5496-bd3a-41b4-aef4-546dd2e85a83","Type":"ContainerStarted","Data":"9bf37f68ff662c9d42de80186ef4166b79a41dec4cb178ed14ddaaae116b3b30"} Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.056832 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-fwm22" event={"ID":"846b5496-bd3a-41b4-aef4-546dd2e85a83","Type":"ContainerStarted","Data":"3d00a5b0bc7dfad505f99ce82d4993d9aea5f3f1e95d8ab5b0e525c7b7c9b428"} Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.063344 4684 generic.go:334] "Generic (PLEG): container finished" podID="af439788-a28d-45c9-b195-b614c82d5f7e" containerID="7f98dcbfe1298293aebe35b4e7a068999b7b06a3056c5b40849be525618428f4" exitCode=0 Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.063537 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk" event={"ID":"af439788-a28d-45c9-b195-b614c82d5f7e","Type":"ContainerDied","Data":"7f98dcbfe1298293aebe35b4e7a068999b7b06a3056c5b40849be525618428f4"} Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.063575 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk" event={"ID":"af439788-a28d-45c9-b195-b614c82d5f7e","Type":"ContainerStarted","Data":"c8f24eb9c276a7f6a80648e39787f02daefaa614b88b59d00e014a2ba98f7ef0"} Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.065858 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a0f64874-c6da-4e22-b3ba-29679844c1e2","Type":"ContainerStarted","Data":"e2b80a6d9cae4dd75f77a91f58b4c4307c069df811184cbfe3c6f4cb9a1824e9"} Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.066573 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6655c7dd6c-fhfvg" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.082535 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-fwm22" podStartSLOduration=2.082326568 podStartE2EDuration="2.082326568s" podCreationTimestamp="2025-10-13 13:21:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:22:00.071656611 +0000 UTC m=+874.639040691" watchObservedRunningTime="2025-10-13 13:22:00.082326568 +0000 UTC m=+874.649710638" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.149955 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6655c7dd6c-fhfvg"] Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.154802 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6655c7dd6c-fhfvg"] Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.305469 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-747494c85c-8wvj7"] Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.310893 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Oct 13 13:22:00 crc kubenswrapper[4684]: W1013 13:22:00.314713 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d3dc831_9857_48cd_818e_cbc797668403.slice/crio-60157f26235ad84fbd38e0e4b051e3493286263ea496419f0eba580a732feca5 WatchSource:0}: Error finding container 60157f26235ad84fbd38e0e4b051e3493286263ea496419f0eba580a732feca5: Status 404 returned error can't find the container with id 60157f26235ad84fbd38e0e4b051e3493286263ea496419f0eba580a732feca5 Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.316555 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.319476 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.319480 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.319512 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.323197 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-72wnl" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.342707 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.361299 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa772389-90ec-4a57-ab32-b3e6dc769f21" path="/var/lib/kubelet/pods/aa772389-90ec-4a57-ab32-b3e6dc769f21/volumes" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.407081 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.407396 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.407454 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5h2vq\" (UniqueName: \"kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-kube-api-access-5h2vq\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.407471 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-cache\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.407494 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-lock\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.512825 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5h2vq\" (UniqueName: \"kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-kube-api-access-5h2vq\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.512873 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: 
\"kubernetes.io/empty-dir/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-cache\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.512922 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-lock\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.513070 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.513136 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.513442 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-lock\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:00 crc kubenswrapper[4684]: E1013 13:22:00.513530 4684 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 13 13:22:00 crc kubenswrapper[4684]: E1013 13:22:00.513549 4684 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 13 13:22:00 crc kubenswrapper[4684]: E1013 13:22:00.513596 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift podName:bca9c4e2-0bbb-4828-bd3a-12c0a75b8946 nodeName:}" failed. No retries permitted until 2025-10-13 13:22:01.013574412 +0000 UTC m=+875.580958592 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift") pod "swift-storage-0" (UID: "bca9c4e2-0bbb-4828-bd3a-12c0a75b8946") : configmap "swift-ring-files" not found Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.513634 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-cache\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.513969 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/swift-storage-0" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.535054 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5h2vq\" (UniqueName: \"kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-kube-api-access-5h2vq\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.542406 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.560215 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.560285 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.854205 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-m58km"] Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.855294 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.858486 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.859866 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.861257 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.875997 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-m58km"] Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.921572 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b1d3d985-3822-439b-9e3b-629629e83b34-scripts\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.921659 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-dispersionconf\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.921689 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-swiftconf\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.921734 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b1d3d985-3822-439b-9e3b-629629e83b34-etc-swift\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.921750 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b1d3d985-3822-439b-9e3b-629629e83b34-ring-data-devices\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.921941 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-combined-ca-bundle\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:00 crc kubenswrapper[4684]: I1013 13:22:00.921970 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lhh6\" (UniqueName: \"kubernetes.io/projected/b1d3d985-3822-439b-9e3b-629629e83b34-kube-api-access-6lhh6\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 
13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.023430 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-combined-ca-bundle\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.023487 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lhh6\" (UniqueName: \"kubernetes.io/projected/b1d3d985-3822-439b-9e3b-629629e83b34-kube-api-access-6lhh6\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.023566 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b1d3d985-3822-439b-9e3b-629629e83b34-scripts\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.023617 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-dispersionconf\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.023641 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.023659 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-swiftconf\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.023677 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b1d3d985-3822-439b-9e3b-629629e83b34-etc-swift\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.023696 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b1d3d985-3822-439b-9e3b-629629e83b34-ring-data-devices\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:01 crc kubenswrapper[4684]: E1013 13:22:01.023775 4684 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 13 13:22:01 crc kubenswrapper[4684]: E1013 13:22:01.023811 4684 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 13 13:22:01 crc kubenswrapper[4684]: E1013 13:22:01.023873 4684 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift podName:bca9c4e2-0bbb-4828-bd3a-12c0a75b8946 nodeName:}" failed. No retries permitted until 2025-10-13 13:22:02.023849617 +0000 UTC m=+876.591233687 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift") pod "swift-storage-0" (UID: "bca9c4e2-0bbb-4828-bd3a-12c0a75b8946") : configmap "swift-ring-files" not found Oct 13 13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.024569 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b1d3d985-3822-439b-9e3b-629629e83b34-etc-swift\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.024937 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b1d3d985-3822-439b-9e3b-629629e83b34-scripts\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.025168 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b1d3d985-3822-439b-9e3b-629629e83b34-ring-data-devices\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.031199 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-dispersionconf\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.033865 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-swiftconf\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.041407 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lhh6\" (UniqueName: \"kubernetes.io/projected/b1d3d985-3822-439b-9e3b-629629e83b34-kube-api-access-6lhh6\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.050876 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-combined-ca-bundle\") pod \"swift-ring-rebalance-m58km\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.077610 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-747494c85c-8wvj7" event={"ID":"0d3dc831-9857-48cd-818e-cbc797668403","Type":"ContainerStarted","Data":"60157f26235ad84fbd38e0e4b051e3493286263ea496419f0eba580a732feca5"} Oct 13 13:22:01 crc kubenswrapper[4684]: I1013 13:22:01.178957 4684 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:02 crc kubenswrapper[4684]: I1013 13:22:02.041326 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:02 crc kubenswrapper[4684]: E1013 13:22:02.041539 4684 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 13 13:22:02 crc kubenswrapper[4684]: E1013 13:22:02.041879 4684 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 13 13:22:02 crc kubenswrapper[4684]: E1013 13:22:02.041976 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift podName:bca9c4e2-0bbb-4828-bd3a-12c0a75b8946 nodeName:}" failed. No retries permitted until 2025-10-13 13:22:04.041950686 +0000 UTC m=+878.609334796 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift") pod "swift-storage-0" (UID: "bca9c4e2-0bbb-4828-bd3a-12c0a75b8946") : configmap "swift-ring-files" not found Oct 13 13:22:03 crc kubenswrapper[4684]: I1013 13:22:03.896560 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 13 13:22:03 crc kubenswrapper[4684]: I1013 13:22:03.968763 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="4d4341d2-642e-4c4c-b517-edb89e87d1f0" containerName="galera" probeResult="failure" output=< Oct 13 13:22:03 crc kubenswrapper[4684]: wsrep_local_state_comment (Joined) differs from Synced Oct 13 13:22:03 crc kubenswrapper[4684]: > Oct 13 13:22:04 crc kubenswrapper[4684]: I1013 13:22:04.075886 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:04 crc kubenswrapper[4684]: E1013 13:22:04.076169 4684 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 13 13:22:04 crc kubenswrapper[4684]: E1013 13:22:04.076188 4684 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 13 13:22:04 crc kubenswrapper[4684]: E1013 13:22:04.076251 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift podName:bca9c4e2-0bbb-4828-bd3a-12c0a75b8946 nodeName:}" failed. No retries permitted until 2025-10-13 13:22:08.076232314 +0000 UTC m=+882.643616384 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift") pod "swift-storage-0" (UID: "bca9c4e2-0bbb-4828-bd3a-12c0a75b8946") : configmap "swift-ring-files" not found Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.008974 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.602510 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk" Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.739052 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qpbcq\" (UniqueName: \"kubernetes.io/projected/af439788-a28d-45c9-b195-b614c82d5f7e-kube-api-access-qpbcq\") pod \"af439788-a28d-45c9-b195-b614c82d5f7e\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.739125 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-dns-svc\") pod \"af439788-a28d-45c9-b195-b614c82d5f7e\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.739162 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-ovsdbserver-nb\") pod \"af439788-a28d-45c9-b195-b614c82d5f7e\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.739230 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-config\") pod \"af439788-a28d-45c9-b195-b614c82d5f7e\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.739316 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-ovsdbserver-sb\") pod \"af439788-a28d-45c9-b195-b614c82d5f7e\" (UID: \"af439788-a28d-45c9-b195-b614c82d5f7e\") " Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.750389 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af439788-a28d-45c9-b195-b614c82d5f7e-kube-api-access-qpbcq" (OuterVolumeSpecName: "kube-api-access-qpbcq") pod "af439788-a28d-45c9-b195-b614c82d5f7e" (UID: "af439788-a28d-45c9-b195-b614c82d5f7e"). InnerVolumeSpecName "kube-api-access-qpbcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.769187 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "af439788-a28d-45c9-b195-b614c82d5f7e" (UID: "af439788-a28d-45c9-b195-b614c82d5f7e"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.771559 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "af439788-a28d-45c9-b195-b614c82d5f7e" (UID: "af439788-a28d-45c9-b195-b614c82d5f7e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.773649 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-config" (OuterVolumeSpecName: "config") pod "af439788-a28d-45c9-b195-b614c82d5f7e" (UID: "af439788-a28d-45c9-b195-b614c82d5f7e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.781794 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "af439788-a28d-45c9-b195-b614c82d5f7e" (UID: "af439788-a28d-45c9-b195-b614c82d5f7e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.811431 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-wzmnn"] Oct 13 13:22:06 crc kubenswrapper[4684]: E1013 13:22:06.811751 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af439788-a28d-45c9-b195-b614c82d5f7e" containerName="init" Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.811769 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="af439788-a28d-45c9-b195-b614c82d5f7e" containerName="init" Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.811931 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="af439788-a28d-45c9-b195-b614c82d5f7e" containerName="init" Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.813598 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-wzmnn" Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.824831 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-wzmnn"] Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.841596 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qpbcq\" (UniqueName: \"kubernetes.io/projected/af439788-a28d-45c9-b195-b614c82d5f7e-kube-api-access-qpbcq\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.841713 4684 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.841795 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.841857 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.841932 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af439788-a28d-45c9-b195-b614c82d5f7e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.893786 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-m58km"] Oct 13 13:22:06 crc kubenswrapper[4684]: W1013 13:22:06.897103 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1d3d985_3822_439b_9e3b_629629e83b34.slice/crio-d4baa5297cc3e1a246fac792d826d426d2716eae3e71dbae3dd289939dd801f6 WatchSource:0}: Error finding container d4baa5297cc3e1a246fac792d826d426d2716eae3e71dbae3dd289939dd801f6: Status 404 returned error can't find the container with id d4baa5297cc3e1a246fac792d826d426d2716eae3e71dbae3dd289939dd801f6 Oct 13 13:22:06 crc kubenswrapper[4684]: I1013 13:22:06.943388 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvf49\" (UniqueName: \"kubernetes.io/projected/9e77f496-7c3a-4769-847c-216d887734f9-kube-api-access-gvf49\") pod \"keystone-db-create-wzmnn\" (UID: \"9e77f496-7c3a-4769-847c-216d887734f9\") " pod="openstack/keystone-db-create-wzmnn" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.017107 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-9h7cm"] Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.018710 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-9h7cm" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.028755 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-9h7cm"] Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.079264 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvf49\" (UniqueName: \"kubernetes.io/projected/9e77f496-7c3a-4769-847c-216d887734f9-kube-api-access-gvf49\") pod \"keystone-db-create-wzmnn\" (UID: \"9e77f496-7c3a-4769-847c-216d887734f9\") " pod="openstack/keystone-db-create-wzmnn" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.100939 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvf49\" (UniqueName: \"kubernetes.io/projected/9e77f496-7c3a-4769-847c-216d887734f9-kube-api-access-gvf49\") pod \"keystone-db-create-wzmnn\" (UID: \"9e77f496-7c3a-4769-847c-216d887734f9\") " pod="openstack/keystone-db-create-wzmnn" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.138945 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk" event={"ID":"af439788-a28d-45c9-b195-b614c82d5f7e","Type":"ContainerDied","Data":"c8f24eb9c276a7f6a80648e39787f02daefaa614b88b59d00e014a2ba98f7ef0"} Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.138999 4684 scope.go:117] "RemoveContainer" containerID="7f98dcbfe1298293aebe35b4e7a068999b7b06a3056c5b40849be525618428f4" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.139103 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ff587cdc-kdsbk" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.147814 4684 generic.go:334] "Generic (PLEG): container finished" podID="0d3dc831-9857-48cd-818e-cbc797668403" containerID="8a02efc2eecdf385ca3687e8a8aed10bb53cf30451bdd27badfcde46940c5f30" exitCode=0 Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.147990 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-747494c85c-8wvj7" event={"ID":"0d3dc831-9857-48cd-818e-cbc797668403","Type":"ContainerDied","Data":"8a02efc2eecdf385ca3687e8a8aed10bb53cf30451bdd27badfcde46940c5f30"} Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.152078 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a0f64874-c6da-4e22-b3ba-29679844c1e2","Type":"ContainerStarted","Data":"ad23ac486a04776e73c8bf27d808050b882ef9dbe7b08422e19da04ebbe9354a"} Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.158420 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-m58km" event={"ID":"b1d3d985-3822-439b-9e3b-629629e83b34","Type":"ContainerStarted","Data":"d4baa5297cc3e1a246fac792d826d426d2716eae3e71dbae3dd289939dd801f6"} Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.181109 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcpfs\" (UniqueName: \"kubernetes.io/projected/7ca28019-9f27-416e-a635-d87fb09c55ae-kube-api-access-qcpfs\") pod \"placement-db-create-9h7cm\" (UID: \"7ca28019-9f27-416e-a635-d87fb09c55ae\") " pod="openstack/placement-db-create-9h7cm" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.201056 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-wzmnn" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.209698 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ff587cdc-kdsbk"] Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.214104 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6ff587cdc-kdsbk"] Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.284269 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcpfs\" (UniqueName: \"kubernetes.io/projected/7ca28019-9f27-416e-a635-d87fb09c55ae-kube-api-access-qcpfs\") pod \"placement-db-create-9h7cm\" (UID: \"7ca28019-9f27-416e-a635-d87fb09c55ae\") " pod="openstack/placement-db-create-9h7cm" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.306129 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcpfs\" (UniqueName: \"kubernetes.io/projected/7ca28019-9f27-416e-a635-d87fb09c55ae-kube-api-access-qcpfs\") pod \"placement-db-create-9h7cm\" (UID: \"7ca28019-9f27-416e-a635-d87fb09c55ae\") " pod="openstack/placement-db-create-9h7cm" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.415463 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9h7cm" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.450643 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9krrk"] Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.452224 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.461886 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9krrk"] Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.590774 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/152e78e7-710e-4b68-9658-d7a2ed67c25a-utilities\") pod \"redhat-marketplace-9krrk\" (UID: \"152e78e7-710e-4b68-9658-d7a2ed67c25a\") " pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.591205 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/152e78e7-710e-4b68-9658-d7a2ed67c25a-catalog-content\") pod \"redhat-marketplace-9krrk\" (UID: \"152e78e7-710e-4b68-9658-d7a2ed67c25a\") " pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.591251 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkrrg\" (UniqueName: \"kubernetes.io/projected/152e78e7-710e-4b68-9658-d7a2ed67c25a-kube-api-access-gkrrg\") pod \"redhat-marketplace-9krrk\" (UID: \"152e78e7-710e-4b68-9658-d7a2ed67c25a\") " pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.615882 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-wzmnn"] Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.695208 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/152e78e7-710e-4b68-9658-d7a2ed67c25a-utilities\") pod 
\"redhat-marketplace-9krrk\" (UID: \"152e78e7-710e-4b68-9658-d7a2ed67c25a\") " pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.695345 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/152e78e7-710e-4b68-9658-d7a2ed67c25a-catalog-content\") pod \"redhat-marketplace-9krrk\" (UID: \"152e78e7-710e-4b68-9658-d7a2ed67c25a\") " pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.695397 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkrrg\" (UniqueName: \"kubernetes.io/projected/152e78e7-710e-4b68-9658-d7a2ed67c25a-kube-api-access-gkrrg\") pod \"redhat-marketplace-9krrk\" (UID: \"152e78e7-710e-4b68-9658-d7a2ed67c25a\") " pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.695796 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/152e78e7-710e-4b68-9658-d7a2ed67c25a-utilities\") pod \"redhat-marketplace-9krrk\" (UID: \"152e78e7-710e-4b68-9658-d7a2ed67c25a\") " pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.698521 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/152e78e7-710e-4b68-9658-d7a2ed67c25a-catalog-content\") pod \"redhat-marketplace-9krrk\" (UID: \"152e78e7-710e-4b68-9658-d7a2ed67c25a\") " pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.718789 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkrrg\" (UniqueName: \"kubernetes.io/projected/152e78e7-710e-4b68-9658-d7a2ed67c25a-kube-api-access-gkrrg\") pod \"redhat-marketplace-9krrk\" (UID: \"152e78e7-710e-4b68-9658-d7a2ed67c25a\") " pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:07 crc kubenswrapper[4684]: I1013 13:22:07.828121 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:08 crc kubenswrapper[4684]: I1013 13:22:07.997995 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-9h7cm"] Oct 13 13:22:08 crc kubenswrapper[4684]: W1013 13:22:08.068919 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ca28019_9f27_416e_a635_d87fb09c55ae.slice/crio-2ef797c339e62c0d6ea146426081c94ce549af3e382bbd093a256532e08c8f9c WatchSource:0}: Error finding container 2ef797c339e62c0d6ea146426081c94ce549af3e382bbd093a256532e08c8f9c: Status 404 returned error can't find the container with id 2ef797c339e62c0d6ea146426081c94ce549af3e382bbd093a256532e08c8f9c Oct 13 13:22:08 crc kubenswrapper[4684]: I1013 13:22:08.102503 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:08 crc kubenswrapper[4684]: E1013 13:22:08.102671 4684 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 13 13:22:08 crc kubenswrapper[4684]: E1013 13:22:08.102702 4684 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 13 13:22:08 crc kubenswrapper[4684]: E1013 13:22:08.102765 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift podName:bca9c4e2-0bbb-4828-bd3a-12c0a75b8946 nodeName:}" failed. No retries permitted until 2025-10-13 13:22:16.10274307 +0000 UTC m=+890.670127140 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift") pod "swift-storage-0" (UID: "bca9c4e2-0bbb-4828-bd3a-12c0a75b8946") : configmap "swift-ring-files" not found Oct 13 13:22:08 crc kubenswrapper[4684]: I1013 13:22:08.167147 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9h7cm" event={"ID":"7ca28019-9f27-416e-a635-d87fb09c55ae","Type":"ContainerStarted","Data":"2ef797c339e62c0d6ea146426081c94ce549af3e382bbd093a256532e08c8f9c"} Oct 13 13:22:08 crc kubenswrapper[4684]: I1013 13:22:08.170095 4684 generic.go:334] "Generic (PLEG): container finished" podID="9e77f496-7c3a-4769-847c-216d887734f9" containerID="4c0b4003decb737fb839dd773ff181c99716d767ea36001bd6fb90544141fa89" exitCode=0 Oct 13 13:22:08 crc kubenswrapper[4684]: I1013 13:22:08.170144 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wzmnn" event={"ID":"9e77f496-7c3a-4769-847c-216d887734f9","Type":"ContainerDied","Data":"4c0b4003decb737fb839dd773ff181c99716d767ea36001bd6fb90544141fa89"} Oct 13 13:22:08 crc kubenswrapper[4684]: I1013 13:22:08.170223 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wzmnn" event={"ID":"9e77f496-7c3a-4769-847c-216d887734f9","Type":"ContainerStarted","Data":"14d9e64fdfc47b44b9db0769c1b36d8f3a2598c768b7d88f7442208aa4ae571a"} Oct 13 13:22:08 crc kubenswrapper[4684]: I1013 13:22:08.174178 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-747494c85c-8wvj7" event={"ID":"0d3dc831-9857-48cd-818e-cbc797668403","Type":"ContainerStarted","Data":"2bbdb654d628f0c3d8e14cdd14a20bc78b9939776625b580ad618e23f49dea6d"} Oct 13 13:22:08 crc kubenswrapper[4684]: I1013 13:22:08.175208 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-747494c85c-8wvj7" Oct 13 13:22:08 crc kubenswrapper[4684]: I1013 13:22:08.177205 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a0f64874-c6da-4e22-b3ba-29679844c1e2","Type":"ContainerStarted","Data":"21c8191089202efbfda0060bc47d2b8de30f0399e0585c88007f0312229f049b"} Oct 13 13:22:08 crc kubenswrapper[4684]: I1013 13:22:08.177959 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 13 13:22:08 crc kubenswrapper[4684]: I1013 13:22:08.216108 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.090648431 podStartE2EDuration="10.216088639s" podCreationTimestamp="2025-10-13 13:21:58 +0000 UTC" firstStartedPulling="2025-10-13 13:21:59.703507782 +0000 UTC m=+874.270891852" lastFinishedPulling="2025-10-13 13:22:06.82894798 +0000 UTC m=+881.396332060" observedRunningTime="2025-10-13 13:22:08.207723987 +0000 UTC m=+882.775108087" watchObservedRunningTime="2025-10-13 13:22:08.216088639 +0000 UTC m=+882.783472709" Oct 13 13:22:08 crc kubenswrapper[4684]: I1013 13:22:08.227655 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-747494c85c-8wvj7" podStartSLOduration=9.227636335 podStartE2EDuration="9.227636335s" podCreationTimestamp="2025-10-13 13:21:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:22:08.223521831 +0000 UTC m=+882.790905891" watchObservedRunningTime="2025-10-13 13:22:08.227636335 +0000 UTC 
m=+882.795020405" Oct 13 13:22:08 crc kubenswrapper[4684]: I1013 13:22:08.364536 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af439788-a28d-45c9-b195-b614c82d5f7e" path="/var/lib/kubelet/pods/af439788-a28d-45c9-b195-b614c82d5f7e/volumes" Oct 13 13:22:08 crc kubenswrapper[4684]: I1013 13:22:08.365536 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9krrk"] Oct 13 13:22:08 crc kubenswrapper[4684]: W1013 13:22:08.390778 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod152e78e7_710e_4b68_9658_d7a2ed67c25a.slice/crio-8238ef37b120eb522276bf24ae78e689193fe8847d3678936fc877c2df40f59d WatchSource:0}: Error finding container 8238ef37b120eb522276bf24ae78e689193fe8847d3678936fc877c2df40f59d: Status 404 returned error can't find the container with id 8238ef37b120eb522276bf24ae78e689193fe8847d3678936fc877c2df40f59d Oct 13 13:22:08 crc kubenswrapper[4684]: E1013 13:22:08.752588 4684 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod152e78e7_710e_4b68_9658_d7a2ed67c25a.slice/crio-f5512a6e52520a267cec86752804ac3e52245cfbbe5d7afb4f1a190788c7863f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod152e78e7_710e_4b68_9658_d7a2ed67c25a.slice/crio-conmon-f5512a6e52520a267cec86752804ac3e52245cfbbe5d7afb4f1a190788c7863f.scope\": RecentStats: unable to find data in memory cache]" Oct 13 13:22:09 crc kubenswrapper[4684]: I1013 13:22:09.185386 4684 generic.go:334] "Generic (PLEG): container finished" podID="152e78e7-710e-4b68-9658-d7a2ed67c25a" containerID="f5512a6e52520a267cec86752804ac3e52245cfbbe5d7afb4f1a190788c7863f" exitCode=0 Oct 13 13:22:09 crc kubenswrapper[4684]: I1013 13:22:09.185563 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9krrk" event={"ID":"152e78e7-710e-4b68-9658-d7a2ed67c25a","Type":"ContainerDied","Data":"f5512a6e52520a267cec86752804ac3e52245cfbbe5d7afb4f1a190788c7863f"} Oct 13 13:22:09 crc kubenswrapper[4684]: I1013 13:22:09.185591 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9krrk" event={"ID":"152e78e7-710e-4b68-9658-d7a2ed67c25a","Type":"ContainerStarted","Data":"8238ef37b120eb522276bf24ae78e689193fe8847d3678936fc877c2df40f59d"} Oct 13 13:22:09 crc kubenswrapper[4684]: I1013 13:22:09.187653 4684 generic.go:334] "Generic (PLEG): container finished" podID="7ca28019-9f27-416e-a635-d87fb09c55ae" containerID="9993c37055b4739e2176c7c266094f594d843aebcc56b3dc71b28360b7c2e2c6" exitCode=0 Oct 13 13:22:09 crc kubenswrapper[4684]: I1013 13:22:09.187752 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9h7cm" event={"ID":"7ca28019-9f27-416e-a635-d87fb09c55ae","Type":"ContainerDied","Data":"9993c37055b4739e2176c7c266094f594d843aebcc56b3dc71b28360b7c2e2c6"} Oct 13 13:22:09 crc kubenswrapper[4684]: I1013 13:22:09.755945 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-wzmnn" Oct 13 13:22:09 crc kubenswrapper[4684]: I1013 13:22:09.857277 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvf49\" (UniqueName: \"kubernetes.io/projected/9e77f496-7c3a-4769-847c-216d887734f9-kube-api-access-gvf49\") pod \"9e77f496-7c3a-4769-847c-216d887734f9\" (UID: \"9e77f496-7c3a-4769-847c-216d887734f9\") " Oct 13 13:22:09 crc kubenswrapper[4684]: I1013 13:22:09.864270 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e77f496-7c3a-4769-847c-216d887734f9-kube-api-access-gvf49" (OuterVolumeSpecName: "kube-api-access-gvf49") pod "9e77f496-7c3a-4769-847c-216d887734f9" (UID: "9e77f496-7c3a-4769-847c-216d887734f9"). InnerVolumeSpecName "kube-api-access-gvf49". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:09 crc kubenswrapper[4684]: I1013 13:22:09.959333 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvf49\" (UniqueName: \"kubernetes.io/projected/9e77f496-7c3a-4769-847c-216d887734f9-kube-api-access-gvf49\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:10 crc kubenswrapper[4684]: I1013 13:22:10.195727 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wzmnn" event={"ID":"9e77f496-7c3a-4769-847c-216d887734f9","Type":"ContainerDied","Data":"14d9e64fdfc47b44b9db0769c1b36d8f3a2598c768b7d88f7442208aa4ae571a"} Oct 13 13:22:10 crc kubenswrapper[4684]: I1013 13:22:10.195815 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14d9e64fdfc47b44b9db0769c1b36d8f3a2598c768b7d88f7442208aa4ae571a" Oct 13 13:22:10 crc kubenswrapper[4684]: I1013 13:22:10.195921 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-wzmnn" Oct 13 13:22:10 crc kubenswrapper[4684]: I1013 13:22:10.732435 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9h7cm" Oct 13 13:22:10 crc kubenswrapper[4684]: I1013 13:22:10.873302 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcpfs\" (UniqueName: \"kubernetes.io/projected/7ca28019-9f27-416e-a635-d87fb09c55ae-kube-api-access-qcpfs\") pod \"7ca28019-9f27-416e-a635-d87fb09c55ae\" (UID: \"7ca28019-9f27-416e-a635-d87fb09c55ae\") " Oct 13 13:22:10 crc kubenswrapper[4684]: I1013 13:22:10.876381 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ca28019-9f27-416e-a635-d87fb09c55ae-kube-api-access-qcpfs" (OuterVolumeSpecName: "kube-api-access-qcpfs") pod "7ca28019-9f27-416e-a635-d87fb09c55ae" (UID: "7ca28019-9f27-416e-a635-d87fb09c55ae"). InnerVolumeSpecName "kube-api-access-qcpfs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:10 crc kubenswrapper[4684]: I1013 13:22:10.975229 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcpfs\" (UniqueName: \"kubernetes.io/projected/7ca28019-9f27-416e-a635-d87fb09c55ae-kube-api-access-qcpfs\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:11 crc kubenswrapper[4684]: I1013 13:22:11.208518 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-m58km" event={"ID":"b1d3d985-3822-439b-9e3b-629629e83b34","Type":"ContainerStarted","Data":"4d4c5fab28bbd9e1b0c2b554161190e7a383723e6292b2a89e38dbf7f686ea55"} Oct 13 13:22:11 crc kubenswrapper[4684]: I1013 13:22:11.212341 4684 generic.go:334] "Generic (PLEG): container finished" podID="152e78e7-710e-4b68-9658-d7a2ed67c25a" containerID="cb63af2f03c309764b4ac811277b6dc8e1be3c479dcac1c51ab53a57cc72aa48" exitCode=0 Oct 13 13:22:11 crc kubenswrapper[4684]: I1013 13:22:11.212455 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9krrk" event={"ID":"152e78e7-710e-4b68-9658-d7a2ed67c25a","Type":"ContainerDied","Data":"cb63af2f03c309764b4ac811277b6dc8e1be3c479dcac1c51ab53a57cc72aa48"} Oct 13 13:22:11 crc kubenswrapper[4684]: I1013 13:22:11.214502 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9h7cm" event={"ID":"7ca28019-9f27-416e-a635-d87fb09c55ae","Type":"ContainerDied","Data":"2ef797c339e62c0d6ea146426081c94ce549af3e382bbd093a256532e08c8f9c"} Oct 13 13:22:11 crc kubenswrapper[4684]: I1013 13:22:11.214569 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ef797c339e62c0d6ea146426081c94ce549af3e382bbd093a256532e08c8f9c" Oct 13 13:22:11 crc kubenswrapper[4684]: I1013 13:22:11.214643 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-9h7cm" Oct 13 13:22:11 crc kubenswrapper[4684]: I1013 13:22:11.229434 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-m58km" podStartSLOduration=7.409191323 podStartE2EDuration="11.229389265s" podCreationTimestamp="2025-10-13 13:22:00 +0000 UTC" firstStartedPulling="2025-10-13 13:22:06.900150417 +0000 UTC m=+881.467534487" lastFinishedPulling="2025-10-13 13:22:10.720348349 +0000 UTC m=+885.287732429" observedRunningTime="2025-10-13 13:22:11.228836467 +0000 UTC m=+885.796220557" watchObservedRunningTime="2025-10-13 13:22:11.229389265 +0000 UTC m=+885.796773375" Oct 13 13:22:12 crc kubenswrapper[4684]: I1013 13:22:12.226739 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9krrk" event={"ID":"152e78e7-710e-4b68-9658-d7a2ed67c25a","Type":"ContainerStarted","Data":"90a1941b2aa6ade580dc5a3bf9b87bdcfc026e460026939e31da0d4ddac5b2be"} Oct 13 13:22:12 crc kubenswrapper[4684]: I1013 13:22:12.251592 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9krrk" podStartSLOduration=3.284672272 podStartE2EDuration="5.251576207s" podCreationTimestamp="2025-10-13 13:22:07 +0000 UTC" firstStartedPulling="2025-10-13 13:22:09.679841231 +0000 UTC m=+884.247225301" lastFinishedPulling="2025-10-13 13:22:11.646745166 +0000 UTC m=+886.214129236" observedRunningTime="2025-10-13 13:22:12.249258222 +0000 UTC m=+886.816642292" watchObservedRunningTime="2025-10-13 13:22:12.251576207 +0000 UTC m=+886.818960277" Oct 13 13:22:12 crc kubenswrapper[4684]: I1013 13:22:12.312245 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-2lzdc"] Oct 13 13:22:12 crc kubenswrapper[4684]: E1013 13:22:12.312585 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e77f496-7c3a-4769-847c-216d887734f9" containerName="mariadb-database-create" Oct 13 13:22:12 crc kubenswrapper[4684]: I1013 13:22:12.312602 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e77f496-7c3a-4769-847c-216d887734f9" containerName="mariadb-database-create" Oct 13 13:22:12 crc kubenswrapper[4684]: E1013 13:22:12.312623 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ca28019-9f27-416e-a635-d87fb09c55ae" containerName="mariadb-database-create" Oct 13 13:22:12 crc kubenswrapper[4684]: I1013 13:22:12.312631 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ca28019-9f27-416e-a635-d87fb09c55ae" containerName="mariadb-database-create" Oct 13 13:22:12 crc kubenswrapper[4684]: I1013 13:22:12.312820 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e77f496-7c3a-4769-847c-216d887734f9" containerName="mariadb-database-create" Oct 13 13:22:12 crc kubenswrapper[4684]: I1013 13:22:12.312836 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ca28019-9f27-416e-a635-d87fb09c55ae" containerName="mariadb-database-create" Oct 13 13:22:12 crc kubenswrapper[4684]: I1013 13:22:12.313420 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-2lzdc" Oct 13 13:22:12 crc kubenswrapper[4684]: I1013 13:22:12.320689 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-2lzdc"] Oct 13 13:22:12 crc kubenswrapper[4684]: I1013 13:22:12.398412 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdtxx\" (UniqueName: \"kubernetes.io/projected/bfd212bc-f05d-4d46-97f4-a23ac84b4ca6-kube-api-access-gdtxx\") pod \"glance-db-create-2lzdc\" (UID: \"bfd212bc-f05d-4d46-97f4-a23ac84b4ca6\") " pod="openstack/glance-db-create-2lzdc" Oct 13 13:22:12 crc kubenswrapper[4684]: I1013 13:22:12.501305 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdtxx\" (UniqueName: \"kubernetes.io/projected/bfd212bc-f05d-4d46-97f4-a23ac84b4ca6-kube-api-access-gdtxx\") pod \"glance-db-create-2lzdc\" (UID: \"bfd212bc-f05d-4d46-97f4-a23ac84b4ca6\") " pod="openstack/glance-db-create-2lzdc" Oct 13 13:22:12 crc kubenswrapper[4684]: I1013 13:22:12.525815 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdtxx\" (UniqueName: \"kubernetes.io/projected/bfd212bc-f05d-4d46-97f4-a23ac84b4ca6-kube-api-access-gdtxx\") pod \"glance-db-create-2lzdc\" (UID: \"bfd212bc-f05d-4d46-97f4-a23ac84b4ca6\") " pod="openstack/glance-db-create-2lzdc" Oct 13 13:22:12 crc kubenswrapper[4684]: I1013 13:22:12.629251 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-2lzdc" Oct 13 13:22:13 crc kubenswrapper[4684]: I1013 13:22:13.123079 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-2lzdc"] Oct 13 13:22:13 crc kubenswrapper[4684]: I1013 13:22:13.236809 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-2lzdc" event={"ID":"bfd212bc-f05d-4d46-97f4-a23ac84b4ca6","Type":"ContainerStarted","Data":"cb54cf0c79b83e6ad407d7eb22ab66b24ece7a12c9a20973e11a493e86607832"} Oct 13 13:22:14 crc kubenswrapper[4684]: I1013 13:22:14.249368 4684 generic.go:334] "Generic (PLEG): container finished" podID="bfd212bc-f05d-4d46-97f4-a23ac84b4ca6" containerID="f0bdc6a9ecc252c5b66b04515ef439030db7f082df3039f27a82813010002cb8" exitCode=0 Oct 13 13:22:14 crc kubenswrapper[4684]: I1013 13:22:14.249420 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-2lzdc" event={"ID":"bfd212bc-f05d-4d46-97f4-a23ac84b4ca6","Type":"ContainerDied","Data":"f0bdc6a9ecc252c5b66b04515ef439030db7f082df3039f27a82813010002cb8"} Oct 13 13:22:14 crc kubenswrapper[4684]: I1013 13:22:14.819220 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-747494c85c-8wvj7" Oct 13 13:22:14 crc kubenswrapper[4684]: I1013 13:22:14.883051 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57486d8b9f-fzmv4"] Oct 13 13:22:14 crc kubenswrapper[4684]: I1013 13:22:14.883351 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" podUID="38c94262-1b0c-4b65-939b-f3ea4171513f" containerName="dnsmasq-dns" containerID="cri-o://e35b4c3e55ea4c27fdb1345b1625da3eb260f3f4ad7b2ec201f802f513f38e14" gracePeriod=10 Oct 13 13:22:15 crc kubenswrapper[4684]: I1013 13:22:15.270703 4684 generic.go:334] "Generic (PLEG): container finished" podID="38c94262-1b0c-4b65-939b-f3ea4171513f" 
containerID="e35b4c3e55ea4c27fdb1345b1625da3eb260f3f4ad7b2ec201f802f513f38e14" exitCode=0 Oct 13 13:22:15 crc kubenswrapper[4684]: I1013 13:22:15.270904 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" event={"ID":"38c94262-1b0c-4b65-939b-f3ea4171513f","Type":"ContainerDied","Data":"e35b4c3e55ea4c27fdb1345b1625da3eb260f3f4ad7b2ec201f802f513f38e14"} Oct 13 13:22:15 crc kubenswrapper[4684]: I1013 13:22:15.404600 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" Oct 13 13:22:15 crc kubenswrapper[4684]: I1013 13:22:15.484961 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9t29\" (UniqueName: \"kubernetes.io/projected/38c94262-1b0c-4b65-939b-f3ea4171513f-kube-api-access-m9t29\") pod \"38c94262-1b0c-4b65-939b-f3ea4171513f\" (UID: \"38c94262-1b0c-4b65-939b-f3ea4171513f\") " Oct 13 13:22:15 crc kubenswrapper[4684]: I1013 13:22:15.485075 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38c94262-1b0c-4b65-939b-f3ea4171513f-config\") pod \"38c94262-1b0c-4b65-939b-f3ea4171513f\" (UID: \"38c94262-1b0c-4b65-939b-f3ea4171513f\") " Oct 13 13:22:15 crc kubenswrapper[4684]: I1013 13:22:15.485117 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38c94262-1b0c-4b65-939b-f3ea4171513f-dns-svc\") pod \"38c94262-1b0c-4b65-939b-f3ea4171513f\" (UID: \"38c94262-1b0c-4b65-939b-f3ea4171513f\") " Oct 13 13:22:15 crc kubenswrapper[4684]: I1013 13:22:15.490878 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38c94262-1b0c-4b65-939b-f3ea4171513f-kube-api-access-m9t29" (OuterVolumeSpecName: "kube-api-access-m9t29") pod "38c94262-1b0c-4b65-939b-f3ea4171513f" (UID: "38c94262-1b0c-4b65-939b-f3ea4171513f"). InnerVolumeSpecName "kube-api-access-m9t29". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:15 crc kubenswrapper[4684]: I1013 13:22:15.527600 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38c94262-1b0c-4b65-939b-f3ea4171513f-config" (OuterVolumeSpecName: "config") pod "38c94262-1b0c-4b65-939b-f3ea4171513f" (UID: "38c94262-1b0c-4b65-939b-f3ea4171513f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:15 crc kubenswrapper[4684]: I1013 13:22:15.533675 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38c94262-1b0c-4b65-939b-f3ea4171513f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "38c94262-1b0c-4b65-939b-f3ea4171513f" (UID: "38c94262-1b0c-4b65-939b-f3ea4171513f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:15 crc kubenswrapper[4684]: I1013 13:22:15.574075 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-2lzdc" Oct 13 13:22:15 crc kubenswrapper[4684]: I1013 13:22:15.586866 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdtxx\" (UniqueName: \"kubernetes.io/projected/bfd212bc-f05d-4d46-97f4-a23ac84b4ca6-kube-api-access-gdtxx\") pod \"bfd212bc-f05d-4d46-97f4-a23ac84b4ca6\" (UID: \"bfd212bc-f05d-4d46-97f4-a23ac84b4ca6\") " Oct 13 13:22:15 crc kubenswrapper[4684]: I1013 13:22:15.588031 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9t29\" (UniqueName: \"kubernetes.io/projected/38c94262-1b0c-4b65-939b-f3ea4171513f-kube-api-access-m9t29\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:15 crc kubenswrapper[4684]: I1013 13:22:15.588063 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38c94262-1b0c-4b65-939b-f3ea4171513f-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:15 crc kubenswrapper[4684]: I1013 13:22:15.588075 4684 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38c94262-1b0c-4b65-939b-f3ea4171513f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:15 crc kubenswrapper[4684]: I1013 13:22:15.592137 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfd212bc-f05d-4d46-97f4-a23ac84b4ca6-kube-api-access-gdtxx" (OuterVolumeSpecName: "kube-api-access-gdtxx") pod "bfd212bc-f05d-4d46-97f4-a23ac84b4ca6" (UID: "bfd212bc-f05d-4d46-97f4-a23ac84b4ca6"). InnerVolumeSpecName "kube-api-access-gdtxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:15 crc kubenswrapper[4684]: I1013 13:22:15.693025 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdtxx\" (UniqueName: \"kubernetes.io/projected/bfd212bc-f05d-4d46-97f4-a23ac84b4ca6-kube-api-access-gdtxx\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.200922 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:16 crc kubenswrapper[4684]: E1013 13:22:16.201121 4684 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 13 13:22:16 crc kubenswrapper[4684]: E1013 13:22:16.201281 4684 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 13 13:22:16 crc kubenswrapper[4684]: E1013 13:22:16.201355 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift podName:bca9c4e2-0bbb-4828-bd3a-12c0a75b8946 nodeName:}" failed. No retries permitted until 2025-10-13 13:22:32.201333907 +0000 UTC m=+906.768717977 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift") pod "swift-storage-0" (UID: "bca9c4e2-0bbb-4828-bd3a-12c0a75b8946") : configmap "swift-ring-files" not found Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.279272 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-2lzdc" event={"ID":"bfd212bc-f05d-4d46-97f4-a23ac84b4ca6","Type":"ContainerDied","Data":"cb54cf0c79b83e6ad407d7eb22ab66b24ece7a12c9a20973e11a493e86607832"} Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.279310 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb54cf0c79b83e6ad407d7eb22ab66b24ece7a12c9a20973e11a493e86607832" Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.279282 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-2lzdc" Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.281374 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" event={"ID":"38c94262-1b0c-4b65-939b-f3ea4171513f","Type":"ContainerDied","Data":"86524202f713792ef4f528c878a9b5afc4155da502f8536c8e83873e9ef13db7"} Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.281404 4684 scope.go:117] "RemoveContainer" containerID="e35b4c3e55ea4c27fdb1345b1625da3eb260f3f4ad7b2ec201f802f513f38e14" Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.281496 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57486d8b9f-fzmv4" Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.300105 4684 scope.go:117] "RemoveContainer" containerID="71230176b583517ccaa26a5ca56c01ae9cba8d88119ddefb75a1b794d2c9961c" Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.372002 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57486d8b9f-fzmv4"] Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.374310 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57486d8b9f-fzmv4"] Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.857545 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-3fce-account-create-rhspt"] Oct 13 13:22:16 crc kubenswrapper[4684]: E1013 13:22:16.858391 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38c94262-1b0c-4b65-939b-f3ea4171513f" containerName="init" Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.858445 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="38c94262-1b0c-4b65-939b-f3ea4171513f" containerName="init" Oct 13 13:22:16 crc kubenswrapper[4684]: E1013 13:22:16.858463 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfd212bc-f05d-4d46-97f4-a23ac84b4ca6" containerName="mariadb-database-create" Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.858472 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfd212bc-f05d-4d46-97f4-a23ac84b4ca6" containerName="mariadb-database-create" Oct 13 13:22:16 crc kubenswrapper[4684]: E1013 13:22:16.858501 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38c94262-1b0c-4b65-939b-f3ea4171513f" containerName="dnsmasq-dns" Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.858510 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="38c94262-1b0c-4b65-939b-f3ea4171513f" containerName="dnsmasq-dns" Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.863663 
4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfd212bc-f05d-4d46-97f4-a23ac84b4ca6" containerName="mariadb-database-create" Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.863721 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="38c94262-1b0c-4b65-939b-f3ea4171513f" containerName="dnsmasq-dns" Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.864671 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-3fce-account-create-rhspt" Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.867307 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.872065 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-3fce-account-create-rhspt"] Oct 13 13:22:16 crc kubenswrapper[4684]: I1013 13:22:16.914778 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5bfz\" (UniqueName: \"kubernetes.io/projected/847ba3c2-e9b1-4b08-a008-d984bf6fbf35-kube-api-access-x5bfz\") pod \"keystone-3fce-account-create-rhspt\" (UID: \"847ba3c2-e9b1-4b08-a008-d984bf6fbf35\") " pod="openstack/keystone-3fce-account-create-rhspt" Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.017170 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5bfz\" (UniqueName: \"kubernetes.io/projected/847ba3c2-e9b1-4b08-a008-d984bf6fbf35-kube-api-access-x5bfz\") pod \"keystone-3fce-account-create-rhspt\" (UID: \"847ba3c2-e9b1-4b08-a008-d984bf6fbf35\") " pod="openstack/keystone-3fce-account-create-rhspt" Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.036888 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5bfz\" (UniqueName: \"kubernetes.io/projected/847ba3c2-e9b1-4b08-a008-d984bf6fbf35-kube-api-access-x5bfz\") pod \"keystone-3fce-account-create-rhspt\" (UID: \"847ba3c2-e9b1-4b08-a008-d984bf6fbf35\") " pod="openstack/keystone-3fce-account-create-rhspt" Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.146596 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-08cf-account-create-4hrzk"] Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.147821 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-08cf-account-create-4hrzk" Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.149167 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.162214 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-08cf-account-create-4hrzk"] Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.189050 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-3fce-account-create-rhspt" Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.219753 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lz4dq\" (UniqueName: \"kubernetes.io/projected/a68a5492-b367-4376-9328-4721993efa19-kube-api-access-lz4dq\") pod \"placement-08cf-account-create-4hrzk\" (UID: \"a68a5492-b367-4376-9328-4721993efa19\") " pod="openstack/placement-08cf-account-create-4hrzk" Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.294128 4684 generic.go:334] "Generic (PLEG): container finished" podID="1b29378a-0de1-402e-993a-a83cc3d41b67" containerID="3ca8fe97998abbb2c1cd3dfd410dc6cdf2f4f2abce4f4ba7eba8b0f3958fde16" exitCode=0 Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.294218 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1b29378a-0de1-402e-993a-a83cc3d41b67","Type":"ContainerDied","Data":"3ca8fe97998abbb2c1cd3dfd410dc6cdf2f4f2abce4f4ba7eba8b0f3958fde16"} Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.298192 4684 generic.go:334] "Generic (PLEG): container finished" podID="c869998b-76f5-409d-9ff4-4abe3f7c9289" containerID="1c5c4ba96b7172f7b427be234ebb1240b723c34afd1406b81ab69e47670a3d5b" exitCode=0 Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.298234 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c869998b-76f5-409d-9ff4-4abe3f7c9289","Type":"ContainerDied","Data":"1c5c4ba96b7172f7b427be234ebb1240b723c34afd1406b81ab69e47670a3d5b"} Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.326078 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lz4dq\" (UniqueName: \"kubernetes.io/projected/a68a5492-b367-4376-9328-4721993efa19-kube-api-access-lz4dq\") pod \"placement-08cf-account-create-4hrzk\" (UID: \"a68a5492-b367-4376-9328-4721993efa19\") " pod="openstack/placement-08cf-account-create-4hrzk" Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.352624 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lz4dq\" (UniqueName: \"kubernetes.io/projected/a68a5492-b367-4376-9328-4721993efa19-kube-api-access-lz4dq\") pod \"placement-08cf-account-create-4hrzk\" (UID: \"a68a5492-b367-4376-9328-4721993efa19\") " pod="openstack/placement-08cf-account-create-4hrzk" Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.500974 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-08cf-account-create-4hrzk" Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.623839 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-3fce-account-create-rhspt"] Oct 13 13:22:17 crc kubenswrapper[4684]: W1013 13:22:17.633884 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod847ba3c2_e9b1_4b08_a008_d984bf6fbf35.slice/crio-b6df6c46c50c19a96182bc4ae1f281a9e4b6b65e71c180fdbabae203d50dab0c WatchSource:0}: Error finding container b6df6c46c50c19a96182bc4ae1f281a9e4b6b65e71c180fdbabae203d50dab0c: Status 404 returned error can't find the container with id b6df6c46c50c19a96182bc4ae1f281a9e4b6b65e71c180fdbabae203d50dab0c Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.754618 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-08cf-account-create-4hrzk"] Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.828393 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.828439 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:17 crc kubenswrapper[4684]: I1013 13:22:17.874882 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.308235 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c869998b-76f5-409d-9ff4-4abe3f7c9289","Type":"ContainerStarted","Data":"4bcb76d0b48c055f353c0a74a0c9468ce22a179a84be052dbd887ed5949b8f59"} Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.309279 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.310136 4684 generic.go:334] "Generic (PLEG): container finished" podID="b1d3d985-3822-439b-9e3b-629629e83b34" containerID="4d4c5fab28bbd9e1b0c2b554161190e7a383723e6292b2a89e38dbf7f686ea55" exitCode=0 Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.310204 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-m58km" event={"ID":"b1d3d985-3822-439b-9e3b-629629e83b34","Type":"ContainerDied","Data":"4d4c5fab28bbd9e1b0c2b554161190e7a383723e6292b2a89e38dbf7f686ea55"} Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.312042 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1b29378a-0de1-402e-993a-a83cc3d41b67","Type":"ContainerStarted","Data":"a04e674ecf0bfea746018f05d881840bff8745c72141322c4d0526284203f55b"} Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.312216 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.313540 4684 generic.go:334] "Generic (PLEG): container finished" podID="a68a5492-b367-4376-9328-4721993efa19" containerID="da439ed2cdba43553d44bdaa42cce2dd97a5477d9e04eca406a370276789357d" exitCode=0 Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.313623 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-08cf-account-create-4hrzk" 
event={"ID":"a68a5492-b367-4376-9328-4721993efa19","Type":"ContainerDied","Data":"da439ed2cdba43553d44bdaa42cce2dd97a5477d9e04eca406a370276789357d"} Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.313648 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-08cf-account-create-4hrzk" event={"ID":"a68a5492-b367-4376-9328-4721993efa19","Type":"ContainerStarted","Data":"8efee1118f693ffad5e2799d6201cec1cf497c10c8694b0cf8205a23acbe23d8"} Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.315513 4684 generic.go:334] "Generic (PLEG): container finished" podID="847ba3c2-e9b1-4b08-a008-d984bf6fbf35" containerID="b9738c706f605757802e626de1c6fbf4289aa110f41359da29ae662cf861c906" exitCode=0 Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.315623 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-3fce-account-create-rhspt" event={"ID":"847ba3c2-e9b1-4b08-a008-d984bf6fbf35","Type":"ContainerDied","Data":"b9738c706f605757802e626de1c6fbf4289aa110f41359da29ae662cf861c906"} Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.315667 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-3fce-account-create-rhspt" event={"ID":"847ba3c2-e9b1-4b08-a008-d984bf6fbf35","Type":"ContainerStarted","Data":"b6df6c46c50c19a96182bc4ae1f281a9e4b6b65e71c180fdbabae203d50dab0c"} Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.335274 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=44.649656655 podStartE2EDuration="56.335256436s" podCreationTimestamp="2025-10-13 13:21:22 +0000 UTC" firstStartedPulling="2025-10-13 13:21:30.92619182 +0000 UTC m=+845.493575890" lastFinishedPulling="2025-10-13 13:21:42.611791601 +0000 UTC m=+857.179175671" observedRunningTime="2025-10-13 13:22:18.329531279 +0000 UTC m=+892.896915380" watchObservedRunningTime="2025-10-13 13:22:18.335256436 +0000 UTC m=+892.902640506" Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.360422 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.762768674 podStartE2EDuration="56.360394294s" podCreationTimestamp="2025-10-13 13:21:22 +0000 UTC" firstStartedPulling="2025-10-13 13:21:24.947538363 +0000 UTC m=+839.514922433" lastFinishedPulling="2025-10-13 13:21:42.545163983 +0000 UTC m=+857.112548053" observedRunningTime="2025-10-13 13:22:18.358287035 +0000 UTC m=+892.925671105" watchObservedRunningTime="2025-10-13 13:22:18.360394294 +0000 UTC m=+892.927778414" Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.364704 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38c94262-1b0c-4b65-939b-f3ea4171513f" path="/var/lib/kubelet/pods/38c94262-1b0c-4b65-939b-f3ea4171513f/volumes" Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.375205 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:18 crc kubenswrapper[4684]: I1013 13:22:18.456329 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9krrk"] Oct 13 13:22:19 crc kubenswrapper[4684]: I1013 13:22:19.046142 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 13 13:22:19 crc kubenswrapper[4684]: I1013 13:22:19.656168 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-3fce-account-create-rhspt" Oct 13 13:22:19 crc kubenswrapper[4684]: I1013 13:22:19.762189 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5bfz\" (UniqueName: \"kubernetes.io/projected/847ba3c2-e9b1-4b08-a008-d984bf6fbf35-kube-api-access-x5bfz\") pod \"847ba3c2-e9b1-4b08-a008-d984bf6fbf35\" (UID: \"847ba3c2-e9b1-4b08-a008-d984bf6fbf35\") " Oct 13 13:22:19 crc kubenswrapper[4684]: I1013 13:22:19.768136 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/847ba3c2-e9b1-4b08-a008-d984bf6fbf35-kube-api-access-x5bfz" (OuterVolumeSpecName: "kube-api-access-x5bfz") pod "847ba3c2-e9b1-4b08-a008-d984bf6fbf35" (UID: "847ba3c2-e9b1-4b08-a008-d984bf6fbf35"). InnerVolumeSpecName "kube-api-access-x5bfz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:19 crc kubenswrapper[4684]: I1013 13:22:19.825009 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-08cf-account-create-4hrzk" Oct 13 13:22:19 crc kubenswrapper[4684]: I1013 13:22:19.832653 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:19 crc kubenswrapper[4684]: I1013 13:22:19.864267 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5bfz\" (UniqueName: \"kubernetes.io/projected/847ba3c2-e9b1-4b08-a008-d984bf6fbf35-kube-api-access-x5bfz\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:19 crc kubenswrapper[4684]: I1013 13:22:19.964978 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-dispersionconf\") pod \"b1d3d985-3822-439b-9e3b-629629e83b34\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " Oct 13 13:22:19 crc kubenswrapper[4684]: I1013 13:22:19.965068 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz4dq\" (UniqueName: \"kubernetes.io/projected/a68a5492-b367-4376-9328-4721993efa19-kube-api-access-lz4dq\") pod \"a68a5492-b367-4376-9328-4721993efa19\" (UID: \"a68a5492-b367-4376-9328-4721993efa19\") " Oct 13 13:22:19 crc kubenswrapper[4684]: I1013 13:22:19.965094 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b1d3d985-3822-439b-9e3b-629629e83b34-scripts\") pod \"b1d3d985-3822-439b-9e3b-629629e83b34\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " Oct 13 13:22:19 crc kubenswrapper[4684]: I1013 13:22:19.965583 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b1d3d985-3822-439b-9e3b-629629e83b34-ring-data-devices\") pod \"b1d3d985-3822-439b-9e3b-629629e83b34\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " Oct 13 13:22:19 crc kubenswrapper[4684]: I1013 13:22:19.965623 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lhh6\" (UniqueName: \"kubernetes.io/projected/b1d3d985-3822-439b-9e3b-629629e83b34-kube-api-access-6lhh6\") pod \"b1d3d985-3822-439b-9e3b-629629e83b34\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " Oct 13 13:22:19 crc kubenswrapper[4684]: I1013 13:22:19.965671 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/empty-dir/b1d3d985-3822-439b-9e3b-629629e83b34-etc-swift\") pod \"b1d3d985-3822-439b-9e3b-629629e83b34\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:19.965700 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-swiftconf\") pod \"b1d3d985-3822-439b-9e3b-629629e83b34\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:19.965739 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-combined-ca-bundle\") pod \"b1d3d985-3822-439b-9e3b-629629e83b34\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:19.966934 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1d3d985-3822-439b-9e3b-629629e83b34-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "b1d3d985-3822-439b-9e3b-629629e83b34" (UID: "b1d3d985-3822-439b-9e3b-629629e83b34"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:19.967352 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1d3d985-3822-439b-9e3b-629629e83b34-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "b1d3d985-3822-439b-9e3b-629629e83b34" (UID: "b1d3d985-3822-439b-9e3b-629629e83b34"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:19.968766 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a68a5492-b367-4376-9328-4721993efa19-kube-api-access-lz4dq" (OuterVolumeSpecName: "kube-api-access-lz4dq") pod "a68a5492-b367-4376-9328-4721993efa19" (UID: "a68a5492-b367-4376-9328-4721993efa19"). InnerVolumeSpecName "kube-api-access-lz4dq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:19.970599 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1d3d985-3822-439b-9e3b-629629e83b34-kube-api-access-6lhh6" (OuterVolumeSpecName: "kube-api-access-6lhh6") pod "b1d3d985-3822-439b-9e3b-629629e83b34" (UID: "b1d3d985-3822-439b-9e3b-629629e83b34"). InnerVolumeSpecName "kube-api-access-6lhh6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:19.984109 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "b1d3d985-3822-439b-9e3b-629629e83b34" (UID: "b1d3d985-3822-439b-9e3b-629629e83b34"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:19.994456 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1d3d985-3822-439b-9e3b-629629e83b34" (UID: "b1d3d985-3822-439b-9e3b-629629e83b34"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:22:20 crc kubenswrapper[4684]: E1013 13:22:20.012929 4684 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-swiftconf podName:b1d3d985-3822-439b-9e3b-629629e83b34 nodeName:}" failed. No retries permitted until 2025-10-13 13:22:20.512864147 +0000 UTC m=+895.080248227 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "swiftconf" (UniqueName: "kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-swiftconf") pod "b1d3d985-3822-439b-9e3b-629629e83b34" (UID: "b1d3d985-3822-439b-9e3b-629629e83b34") : error deleting /var/lib/kubelet/pods/b1d3d985-3822-439b-9e3b-629629e83b34/volume-subpaths: remove /var/lib/kubelet/pods/b1d3d985-3822-439b-9e3b-629629e83b34/volume-subpaths: no such file or directory Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.013243 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1d3d985-3822-439b-9e3b-629629e83b34-scripts" (OuterVolumeSpecName: "scripts") pod "b1d3d985-3822-439b-9e3b-629629e83b34" (UID: "b1d3d985-3822-439b-9e3b-629629e83b34"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.068968 4684 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.069024 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz4dq\" (UniqueName: \"kubernetes.io/projected/a68a5492-b367-4376-9328-4721993efa19-kube-api-access-lz4dq\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.069039 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b1d3d985-3822-439b-9e3b-629629e83b34-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.069051 4684 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b1d3d985-3822-439b-9e3b-629629e83b34-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.069066 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lhh6\" (UniqueName: \"kubernetes.io/projected/b1d3d985-3822-439b-9e3b-629629e83b34-kube-api-access-6lhh6\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.069079 4684 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b1d3d985-3822-439b-9e3b-629629e83b34-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.069088 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.332962 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-m58km" event={"ID":"b1d3d985-3822-439b-9e3b-629629e83b34","Type":"ContainerDied","Data":"d4baa5297cc3e1a246fac792d826d426d2716eae3e71dbae3dd289939dd801f6"} Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.333014 4684 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d4baa5297cc3e1a246fac792d826d426d2716eae3e71dbae3dd289939dd801f6" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.333027 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-m58km" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.335069 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-08cf-account-create-4hrzk" event={"ID":"a68a5492-b367-4376-9328-4721993efa19","Type":"ContainerDied","Data":"8efee1118f693ffad5e2799d6201cec1cf497c10c8694b0cf8205a23acbe23d8"} Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.335107 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8efee1118f693ffad5e2799d6201cec1cf497c10c8694b0cf8205a23acbe23d8" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.335088 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-08cf-account-create-4hrzk" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.336873 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-3fce-account-create-rhspt" event={"ID":"847ba3c2-e9b1-4b08-a008-d984bf6fbf35","Type":"ContainerDied","Data":"b6df6c46c50c19a96182bc4ae1f281a9e4b6b65e71c180fdbabae203d50dab0c"} Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.336986 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6df6c46c50c19a96182bc4ae1f281a9e4b6b65e71c180fdbabae203d50dab0c" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.336926 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-3fce-account-create-rhspt" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.336997 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9krrk" podUID="152e78e7-710e-4b68-9658-d7a2ed67c25a" containerName="registry-server" containerID="cri-o://90a1941b2aa6ade580dc5a3bf9b87bdcfc026e460026939e31da0d4ddac5b2be" gracePeriod=2 Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.581198 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-swiftconf\") pod \"b1d3d985-3822-439b-9e3b-629629e83b34\" (UID: \"b1d3d985-3822-439b-9e3b-629629e83b34\") " Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.587627 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "b1d3d985-3822-439b-9e3b-629629e83b34" (UID: "b1d3d985-3822-439b-9e3b-629629e83b34"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.683245 4684 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b1d3d985-3822-439b-9e3b-629629e83b34-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.824687 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.886247 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/152e78e7-710e-4b68-9658-d7a2ed67c25a-utilities\") pod \"152e78e7-710e-4b68-9658-d7a2ed67c25a\" (UID: \"152e78e7-710e-4b68-9658-d7a2ed67c25a\") " Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.886477 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/152e78e7-710e-4b68-9658-d7a2ed67c25a-catalog-content\") pod \"152e78e7-710e-4b68-9658-d7a2ed67c25a\" (UID: \"152e78e7-710e-4b68-9658-d7a2ed67c25a\") " Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.886553 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkrrg\" (UniqueName: \"kubernetes.io/projected/152e78e7-710e-4b68-9658-d7a2ed67c25a-kube-api-access-gkrrg\") pod \"152e78e7-710e-4b68-9658-d7a2ed67c25a\" (UID: \"152e78e7-710e-4b68-9658-d7a2ed67c25a\") " Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.887595 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/152e78e7-710e-4b68-9658-d7a2ed67c25a-utilities" (OuterVolumeSpecName: "utilities") pod "152e78e7-710e-4b68-9658-d7a2ed67c25a" (UID: "152e78e7-710e-4b68-9658-d7a2ed67c25a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.899299 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/152e78e7-710e-4b68-9658-d7a2ed67c25a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "152e78e7-710e-4b68-9658-d7a2ed67c25a" (UID: "152e78e7-710e-4b68-9658-d7a2ed67c25a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.901635 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/152e78e7-710e-4b68-9658-d7a2ed67c25a-kube-api-access-gkrrg" (OuterVolumeSpecName: "kube-api-access-gkrrg") pod "152e78e7-710e-4b68-9658-d7a2ed67c25a" (UID: "152e78e7-710e-4b68-9658-d7a2ed67c25a"). InnerVolumeSpecName "kube-api-access-gkrrg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.988587 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/152e78e7-710e-4b68-9658-d7a2ed67c25a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.988624 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkrrg\" (UniqueName: \"kubernetes.io/projected/152e78e7-710e-4b68-9658-d7a2ed67c25a-kube-api-access-gkrrg\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:20 crc kubenswrapper[4684]: I1013 13:22:20.988636 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/152e78e7-710e-4b68-9658-d7a2ed67c25a-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:21 crc kubenswrapper[4684]: I1013 13:22:21.347378 4684 generic.go:334] "Generic (PLEG): container finished" podID="152e78e7-710e-4b68-9658-d7a2ed67c25a" containerID="90a1941b2aa6ade580dc5a3bf9b87bdcfc026e460026939e31da0d4ddac5b2be" exitCode=0 Oct 13 13:22:21 crc kubenswrapper[4684]: I1013 13:22:21.347438 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9krrk" Oct 13 13:22:21 crc kubenswrapper[4684]: I1013 13:22:21.347478 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9krrk" event={"ID":"152e78e7-710e-4b68-9658-d7a2ed67c25a","Type":"ContainerDied","Data":"90a1941b2aa6ade580dc5a3bf9b87bdcfc026e460026939e31da0d4ddac5b2be"} Oct 13 13:22:21 crc kubenswrapper[4684]: I1013 13:22:21.347842 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9krrk" event={"ID":"152e78e7-710e-4b68-9658-d7a2ed67c25a","Type":"ContainerDied","Data":"8238ef37b120eb522276bf24ae78e689193fe8847d3678936fc877c2df40f59d"} Oct 13 13:22:21 crc kubenswrapper[4684]: I1013 13:22:21.347864 4684 scope.go:117] "RemoveContainer" containerID="90a1941b2aa6ade580dc5a3bf9b87bdcfc026e460026939e31da0d4ddac5b2be" Oct 13 13:22:21 crc kubenswrapper[4684]: I1013 13:22:21.364806 4684 scope.go:117] "RemoveContainer" containerID="cb63af2f03c309764b4ac811277b6dc8e1be3c479dcac1c51ab53a57cc72aa48" Oct 13 13:22:21 crc kubenswrapper[4684]: I1013 13:22:21.378727 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9krrk"] Oct 13 13:22:21 crc kubenswrapper[4684]: I1013 13:22:21.384879 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9krrk"] Oct 13 13:22:21 crc kubenswrapper[4684]: I1013 13:22:21.388327 4684 scope.go:117] "RemoveContainer" containerID="f5512a6e52520a267cec86752804ac3e52245cfbbe5d7afb4f1a190788c7863f" Oct 13 13:22:21 crc kubenswrapper[4684]: I1013 13:22:21.407230 4684 scope.go:117] "RemoveContainer" containerID="90a1941b2aa6ade580dc5a3bf9b87bdcfc026e460026939e31da0d4ddac5b2be" Oct 13 13:22:21 crc kubenswrapper[4684]: E1013 13:22:21.407775 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90a1941b2aa6ade580dc5a3bf9b87bdcfc026e460026939e31da0d4ddac5b2be\": container with ID starting with 90a1941b2aa6ade580dc5a3bf9b87bdcfc026e460026939e31da0d4ddac5b2be not found: ID does not exist" containerID="90a1941b2aa6ade580dc5a3bf9b87bdcfc026e460026939e31da0d4ddac5b2be" Oct 13 13:22:21 crc kubenswrapper[4684]: I1013 13:22:21.407812 4684 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90a1941b2aa6ade580dc5a3bf9b87bdcfc026e460026939e31da0d4ddac5b2be"} err="failed to get container status \"90a1941b2aa6ade580dc5a3bf9b87bdcfc026e460026939e31da0d4ddac5b2be\": rpc error: code = NotFound desc = could not find container \"90a1941b2aa6ade580dc5a3bf9b87bdcfc026e460026939e31da0d4ddac5b2be\": container with ID starting with 90a1941b2aa6ade580dc5a3bf9b87bdcfc026e460026939e31da0d4ddac5b2be not found: ID does not exist" Oct 13 13:22:21 crc kubenswrapper[4684]: I1013 13:22:21.407836 4684 scope.go:117] "RemoveContainer" containerID="cb63af2f03c309764b4ac811277b6dc8e1be3c479dcac1c51ab53a57cc72aa48" Oct 13 13:22:21 crc kubenswrapper[4684]: E1013 13:22:21.408249 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb63af2f03c309764b4ac811277b6dc8e1be3c479dcac1c51ab53a57cc72aa48\": container with ID starting with cb63af2f03c309764b4ac811277b6dc8e1be3c479dcac1c51ab53a57cc72aa48 not found: ID does not exist" containerID="cb63af2f03c309764b4ac811277b6dc8e1be3c479dcac1c51ab53a57cc72aa48" Oct 13 13:22:21 crc kubenswrapper[4684]: I1013 13:22:21.408276 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb63af2f03c309764b4ac811277b6dc8e1be3c479dcac1c51ab53a57cc72aa48"} err="failed to get container status \"cb63af2f03c309764b4ac811277b6dc8e1be3c479dcac1c51ab53a57cc72aa48\": rpc error: code = NotFound desc = could not find container \"cb63af2f03c309764b4ac811277b6dc8e1be3c479dcac1c51ab53a57cc72aa48\": container with ID starting with cb63af2f03c309764b4ac811277b6dc8e1be3c479dcac1c51ab53a57cc72aa48 not found: ID does not exist" Oct 13 13:22:21 crc kubenswrapper[4684]: I1013 13:22:21.408292 4684 scope.go:117] "RemoveContainer" containerID="f5512a6e52520a267cec86752804ac3e52245cfbbe5d7afb4f1a190788c7863f" Oct 13 13:22:21 crc kubenswrapper[4684]: E1013 13:22:21.408698 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5512a6e52520a267cec86752804ac3e52245cfbbe5d7afb4f1a190788c7863f\": container with ID starting with f5512a6e52520a267cec86752804ac3e52245cfbbe5d7afb4f1a190788c7863f not found: ID does not exist" containerID="f5512a6e52520a267cec86752804ac3e52245cfbbe5d7afb4f1a190788c7863f" Oct 13 13:22:21 crc kubenswrapper[4684]: I1013 13:22:21.408721 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5512a6e52520a267cec86752804ac3e52245cfbbe5d7afb4f1a190788c7863f"} err="failed to get container status \"f5512a6e52520a267cec86752804ac3e52245cfbbe5d7afb4f1a190788c7863f\": rpc error: code = NotFound desc = could not find container \"f5512a6e52520a267cec86752804ac3e52245cfbbe5d7afb4f1a190788c7863f\": container with ID starting with f5512a6e52520a267cec86752804ac3e52245cfbbe5d7afb4f1a190788c7863f not found: ID does not exist" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.359374 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="152e78e7-710e-4b68-9658-d7a2ed67c25a" path="/var/lib/kubelet/pods/152e78e7-710e-4b68-9658-d7a2ed67c25a/volumes" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.456997 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-ffac-account-create-8lkzt"] Oct 13 13:22:22 crc kubenswrapper[4684]: E1013 13:22:22.457287 4684 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="152e78e7-710e-4b68-9658-d7a2ed67c25a" containerName="extract-utilities" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.457301 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="152e78e7-710e-4b68-9658-d7a2ed67c25a" containerName="extract-utilities" Oct 13 13:22:22 crc kubenswrapper[4684]: E1013 13:22:22.457326 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a68a5492-b367-4376-9328-4721993efa19" containerName="mariadb-account-create" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.457332 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="a68a5492-b367-4376-9328-4721993efa19" containerName="mariadb-account-create" Oct 13 13:22:22 crc kubenswrapper[4684]: E1013 13:22:22.457340 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="152e78e7-710e-4b68-9658-d7a2ed67c25a" containerName="registry-server" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.457348 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="152e78e7-710e-4b68-9658-d7a2ed67c25a" containerName="registry-server" Oct 13 13:22:22 crc kubenswrapper[4684]: E1013 13:22:22.457359 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="847ba3c2-e9b1-4b08-a008-d984bf6fbf35" containerName="mariadb-account-create" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.457365 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="847ba3c2-e9b1-4b08-a008-d984bf6fbf35" containerName="mariadb-account-create" Oct 13 13:22:22 crc kubenswrapper[4684]: E1013 13:22:22.457381 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1d3d985-3822-439b-9e3b-629629e83b34" containerName="swift-ring-rebalance" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.457388 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1d3d985-3822-439b-9e3b-629629e83b34" containerName="swift-ring-rebalance" Oct 13 13:22:22 crc kubenswrapper[4684]: E1013 13:22:22.457406 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="152e78e7-710e-4b68-9658-d7a2ed67c25a" containerName="extract-content" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.457414 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="152e78e7-710e-4b68-9658-d7a2ed67c25a" containerName="extract-content" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.457567 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="847ba3c2-e9b1-4b08-a008-d984bf6fbf35" containerName="mariadb-account-create" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.457583 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1d3d985-3822-439b-9e3b-629629e83b34" containerName="swift-ring-rebalance" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.457594 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="a68a5492-b367-4376-9328-4721993efa19" containerName="mariadb-account-create" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.457607 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="152e78e7-710e-4b68-9658-d7a2ed67c25a" containerName="registry-server" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.458113 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-ffac-account-create-8lkzt" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.460589 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.465869 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-ffac-account-create-8lkzt"] Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.511308 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64psm\" (UniqueName: \"kubernetes.io/projected/9779ffc7-b9ee-41fe-9f44-72dc7e9439b6-kube-api-access-64psm\") pod \"glance-ffac-account-create-8lkzt\" (UID: \"9779ffc7-b9ee-41fe-9f44-72dc7e9439b6\") " pod="openstack/glance-ffac-account-create-8lkzt" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.613128 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64psm\" (UniqueName: \"kubernetes.io/projected/9779ffc7-b9ee-41fe-9f44-72dc7e9439b6-kube-api-access-64psm\") pod \"glance-ffac-account-create-8lkzt\" (UID: \"9779ffc7-b9ee-41fe-9f44-72dc7e9439b6\") " pod="openstack/glance-ffac-account-create-8lkzt" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.630730 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64psm\" (UniqueName: \"kubernetes.io/projected/9779ffc7-b9ee-41fe-9f44-72dc7e9439b6-kube-api-access-64psm\") pod \"glance-ffac-account-create-8lkzt\" (UID: \"9779ffc7-b9ee-41fe-9f44-72dc7e9439b6\") " pod="openstack/glance-ffac-account-create-8lkzt" Oct 13 13:22:22 crc kubenswrapper[4684]: I1013 13:22:22.784127 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-ffac-account-create-8lkzt" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.210317 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-ffac-account-create-8lkzt"] Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.366542 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ffac-account-create-8lkzt" event={"ID":"9779ffc7-b9ee-41fe-9f44-72dc7e9439b6","Type":"ContainerStarted","Data":"2bc95d753cc3bdb79c2c7cffba6dd39270aa1eee92b20e2782ec25208cae055f"} Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.366592 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ffac-account-create-8lkzt" event={"ID":"9779ffc7-b9ee-41fe-9f44-72dc7e9439b6","Type":"ContainerStarted","Data":"953fe5bc3bf4de4052cb51468a6f7292964a17def8d69288e210eee9473cce13"} Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.381222 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-ffac-account-create-8lkzt" podStartSLOduration=1.381197296 podStartE2EDuration="1.381197296s" podCreationTimestamp="2025-10-13 13:22:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:22:23.380475532 +0000 UTC m=+897.947859602" watchObservedRunningTime="2025-10-13 13:22:23.381197296 +0000 UTC m=+897.948581366" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.438844 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-jgthj" podUID="a984144e-7322-4045-a696-7ec4b746e061" containerName="ovn-controller" probeResult="failure" output=< Oct 13 13:22:23 crc kubenswrapper[4684]: ERROR - ovn-controller connection 
status is 'not connected', expecting 'connected' status Oct 13 13:22:23 crc kubenswrapper[4684]: > Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.481839 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-fc6hl" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.485070 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-fc6hl" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.718173 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jgthj-config-25dzx"] Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.719615 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.721455 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.735770 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jgthj-config-25dzx"] Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.833483 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/34825f04-b40a-4274-b682-a33648db56b1-additional-scripts\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.833736 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-run\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.833848 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlzcr\" (UniqueName: \"kubernetes.io/projected/34825f04-b40a-4274-b682-a33648db56b1-kube-api-access-jlzcr\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.833971 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-run-ovn\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.834067 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-log-ovn\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.834173 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34825f04-b40a-4274-b682-a33648db56b1-scripts\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: 
\"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.954830 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/34825f04-b40a-4274-b682-a33648db56b1-additional-scripts\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.954945 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-run\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.955249 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-run\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.954980 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlzcr\" (UniqueName: \"kubernetes.io/projected/34825f04-b40a-4274-b682-a33648db56b1-kube-api-access-jlzcr\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.955405 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-run-ovn\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.955437 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-log-ovn\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.955531 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-log-ovn\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.955499 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-run-ovn\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.955597 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34825f04-b40a-4274-b682-a33648db56b1-scripts\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: 
\"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.955659 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/34825f04-b40a-4274-b682-a33648db56b1-additional-scripts\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.957594 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34825f04-b40a-4274-b682-a33648db56b1-scripts\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:23 crc kubenswrapper[4684]: I1013 13:22:23.974843 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlzcr\" (UniqueName: \"kubernetes.io/projected/34825f04-b40a-4274-b682-a33648db56b1-kube-api-access-jlzcr\") pod \"ovn-controller-jgthj-config-25dzx\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:24 crc kubenswrapper[4684]: I1013 13:22:24.038101 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:24 crc kubenswrapper[4684]: I1013 13:22:24.374439 4684 generic.go:334] "Generic (PLEG): container finished" podID="9779ffc7-b9ee-41fe-9f44-72dc7e9439b6" containerID="2bc95d753cc3bdb79c2c7cffba6dd39270aa1eee92b20e2782ec25208cae055f" exitCode=0 Oct 13 13:22:24 crc kubenswrapper[4684]: I1013 13:22:24.376029 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ffac-account-create-8lkzt" event={"ID":"9779ffc7-b9ee-41fe-9f44-72dc7e9439b6","Type":"ContainerDied","Data":"2bc95d753cc3bdb79c2c7cffba6dd39270aa1eee92b20e2782ec25208cae055f"} Oct 13 13:22:24 crc kubenswrapper[4684]: I1013 13:22:24.513187 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jgthj-config-25dzx"] Oct 13 13:22:24 crc kubenswrapper[4684]: W1013 13:22:24.514213 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod34825f04_b40a_4274_b682_a33648db56b1.slice/crio-0f33589717351bd91886cf234d3738d5b03d40e363a987161261b473d108699b WatchSource:0}: Error finding container 0f33589717351bd91886cf234d3738d5b03d40e363a987161261b473d108699b: Status 404 returned error can't find the container with id 0f33589717351bd91886cf234d3738d5b03d40e363a987161261b473d108699b Oct 13 13:22:25 crc kubenswrapper[4684]: I1013 13:22:25.383274 4684 generic.go:334] "Generic (PLEG): container finished" podID="34825f04-b40a-4274-b682-a33648db56b1" containerID="cb7301f5a63ee4c2a3467ca0ea6f1dc776d1930893b4b15caa401f819af78d53" exitCode=0 Oct 13 13:22:25 crc kubenswrapper[4684]: I1013 13:22:25.383366 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jgthj-config-25dzx" event={"ID":"34825f04-b40a-4274-b682-a33648db56b1","Type":"ContainerDied","Data":"cb7301f5a63ee4c2a3467ca0ea6f1dc776d1930893b4b15caa401f819af78d53"} Oct 13 13:22:25 crc kubenswrapper[4684]: I1013 13:22:25.383665 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jgthj-config-25dzx" 
event={"ID":"34825f04-b40a-4274-b682-a33648db56b1","Type":"ContainerStarted","Data":"0f33589717351bd91886cf234d3738d5b03d40e363a987161261b473d108699b"} Oct 13 13:22:25 crc kubenswrapper[4684]: I1013 13:22:25.690493 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-ffac-account-create-8lkzt" Oct 13 13:22:25 crc kubenswrapper[4684]: I1013 13:22:25.790405 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64psm\" (UniqueName: \"kubernetes.io/projected/9779ffc7-b9ee-41fe-9f44-72dc7e9439b6-kube-api-access-64psm\") pod \"9779ffc7-b9ee-41fe-9f44-72dc7e9439b6\" (UID: \"9779ffc7-b9ee-41fe-9f44-72dc7e9439b6\") " Oct 13 13:22:25 crc kubenswrapper[4684]: I1013 13:22:25.795008 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9779ffc7-b9ee-41fe-9f44-72dc7e9439b6-kube-api-access-64psm" (OuterVolumeSpecName: "kube-api-access-64psm") pod "9779ffc7-b9ee-41fe-9f44-72dc7e9439b6" (UID: "9779ffc7-b9ee-41fe-9f44-72dc7e9439b6"). InnerVolumeSpecName "kube-api-access-64psm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:25 crc kubenswrapper[4684]: I1013 13:22:25.892168 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64psm\" (UniqueName: \"kubernetes.io/projected/9779ffc7-b9ee-41fe-9f44-72dc7e9439b6-kube-api-access-64psm\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.393589 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-ffac-account-create-8lkzt" Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.393585 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ffac-account-create-8lkzt" event={"ID":"9779ffc7-b9ee-41fe-9f44-72dc7e9439b6","Type":"ContainerDied","Data":"953fe5bc3bf4de4052cb51468a6f7292964a17def8d69288e210eee9473cce13"} Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.394053 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="953fe5bc3bf4de4052cb51468a6f7292964a17def8d69288e210eee9473cce13" Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.744092 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.905847 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/34825f04-b40a-4274-b682-a33648db56b1-additional-scripts\") pod \"34825f04-b40a-4274-b682-a33648db56b1\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.905914 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34825f04-b40a-4274-b682-a33648db56b1-scripts\") pod \"34825f04-b40a-4274-b682-a33648db56b1\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.906062 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jlzcr\" (UniqueName: \"kubernetes.io/projected/34825f04-b40a-4274-b682-a33648db56b1-kube-api-access-jlzcr\") pod \"34825f04-b40a-4274-b682-a33648db56b1\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.906081 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-run-ovn\") pod \"34825f04-b40a-4274-b682-a33648db56b1\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.906153 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-run\") pod \"34825f04-b40a-4274-b682-a33648db56b1\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.906182 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-log-ovn\") pod \"34825f04-b40a-4274-b682-a33648db56b1\" (UID: \"34825f04-b40a-4274-b682-a33648db56b1\") " Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.906258 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "34825f04-b40a-4274-b682-a33648db56b1" (UID: "34825f04-b40a-4274-b682-a33648db56b1"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.906333 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-run" (OuterVolumeSpecName: "var-run") pod "34825f04-b40a-4274-b682-a33648db56b1" (UID: "34825f04-b40a-4274-b682-a33648db56b1"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.906340 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34825f04-b40a-4274-b682-a33648db56b1-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "34825f04-b40a-4274-b682-a33648db56b1" (UID: "34825f04-b40a-4274-b682-a33648db56b1"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.906454 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "34825f04-b40a-4274-b682-a33648db56b1" (UID: "34825f04-b40a-4274-b682-a33648db56b1"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.907201 4684 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.907294 4684 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-run\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.907314 4684 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/34825f04-b40a-4274-b682-a33648db56b1-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.907332 4684 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/34825f04-b40a-4274-b682-a33648db56b1-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.907209 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34825f04-b40a-4274-b682-a33648db56b1-scripts" (OuterVolumeSpecName: "scripts") pod "34825f04-b40a-4274-b682-a33648db56b1" (UID: "34825f04-b40a-4274-b682-a33648db56b1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:26 crc kubenswrapper[4684]: I1013 13:22:26.910724 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34825f04-b40a-4274-b682-a33648db56b1-kube-api-access-jlzcr" (OuterVolumeSpecName: "kube-api-access-jlzcr") pod "34825f04-b40a-4274-b682-a33648db56b1" (UID: "34825f04-b40a-4274-b682-a33648db56b1"). InnerVolumeSpecName "kube-api-access-jlzcr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.008242 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34825f04-b40a-4274-b682-a33648db56b1-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.008270 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jlzcr\" (UniqueName: \"kubernetes.io/projected/34825f04-b40a-4274-b682-a33648db56b1-kube-api-access-jlzcr\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.402570 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jgthj-config-25dzx" event={"ID":"34825f04-b40a-4274-b682-a33648db56b1","Type":"ContainerDied","Data":"0f33589717351bd91886cf234d3738d5b03d40e363a987161261b473d108699b"} Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.402968 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f33589717351bd91886cf234d3738d5b03d40e363a987161261b473d108699b" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.402646 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jgthj-config-25dzx" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.595012 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-hh8w7"] Oct 13 13:22:27 crc kubenswrapper[4684]: E1013 13:22:27.595425 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9779ffc7-b9ee-41fe-9f44-72dc7e9439b6" containerName="mariadb-account-create" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.595453 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="9779ffc7-b9ee-41fe-9f44-72dc7e9439b6" containerName="mariadb-account-create" Oct 13 13:22:27 crc kubenswrapper[4684]: E1013 13:22:27.595488 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34825f04-b40a-4274-b682-a33648db56b1" containerName="ovn-config" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.595497 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="34825f04-b40a-4274-b682-a33648db56b1" containerName="ovn-config" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.595749 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="34825f04-b40a-4274-b682-a33648db56b1" containerName="ovn-config" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.595808 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="9779ffc7-b9ee-41fe-9f44-72dc7e9439b6" containerName="mariadb-account-create" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.596651 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-hh8w7" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.598730 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-8vppj" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.599157 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.614289 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-hh8w7"] Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.718970 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-db-sync-config-data\") pod \"glance-db-sync-hh8w7\" (UID: \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\") " pod="openstack/glance-db-sync-hh8w7" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.719014 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94l7v\" (UniqueName: \"kubernetes.io/projected/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-kube-api-access-94l7v\") pod \"glance-db-sync-hh8w7\" (UID: \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\") " pod="openstack/glance-db-sync-hh8w7" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.719053 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-config-data\") pod \"glance-db-sync-hh8w7\" (UID: \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\") " pod="openstack/glance-db-sync-hh8w7" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.719337 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-combined-ca-bundle\") pod \"glance-db-sync-hh8w7\" (UID: \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\") " pod="openstack/glance-db-sync-hh8w7" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.820876 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-combined-ca-bundle\") pod \"glance-db-sync-hh8w7\" (UID: \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\") " pod="openstack/glance-db-sync-hh8w7" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.820973 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-db-sync-config-data\") pod \"glance-db-sync-hh8w7\" (UID: \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\") " pod="openstack/glance-db-sync-hh8w7" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.820997 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94l7v\" (UniqueName: \"kubernetes.io/projected/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-kube-api-access-94l7v\") pod \"glance-db-sync-hh8w7\" (UID: \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\") " pod="openstack/glance-db-sync-hh8w7" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.821037 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-config-data\") pod 
\"glance-db-sync-hh8w7\" (UID: \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\") " pod="openstack/glance-db-sync-hh8w7" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.826140 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-jgthj-config-25dzx"] Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.831320 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-jgthj-config-25dzx"] Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.833551 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-db-sync-config-data\") pod \"glance-db-sync-hh8w7\" (UID: \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\") " pod="openstack/glance-db-sync-hh8w7" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.833621 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-config-data\") pod \"glance-db-sync-hh8w7\" (UID: \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\") " pod="openstack/glance-db-sync-hh8w7" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.835696 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-combined-ca-bundle\") pod \"glance-db-sync-hh8w7\" (UID: \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\") " pod="openstack/glance-db-sync-hh8w7" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.843414 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94l7v\" (UniqueName: \"kubernetes.io/projected/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-kube-api-access-94l7v\") pod \"glance-db-sync-hh8w7\" (UID: \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\") " pod="openstack/glance-db-sync-hh8w7" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.922813 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-hh8w7" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.939308 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jgthj-config-ntgm8"] Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.941437 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.943305 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 13 13:22:27 crc kubenswrapper[4684]: I1013 13:22:27.954945 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jgthj-config-ntgm8"] Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.023189 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/143d24b9-3b40-4ca9-98ef-43d6effabe2d-scripts\") pod \"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.023260 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-run-ovn\") pod \"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.023338 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-run\") pod \"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.023359 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2czk\" (UniqueName: \"kubernetes.io/projected/143d24b9-3b40-4ca9-98ef-43d6effabe2d-kube-api-access-r2czk\") pod \"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.023379 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-log-ovn\") pod \"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.023515 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/143d24b9-3b40-4ca9-98ef-43d6effabe2d-additional-scripts\") pod \"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.125081 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/143d24b9-3b40-4ca9-98ef-43d6effabe2d-scripts\") pod \"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.125517 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-run-ovn\") pod 
\"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.125595 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-run\") pod \"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.125619 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2czk\" (UniqueName: \"kubernetes.io/projected/143d24b9-3b40-4ca9-98ef-43d6effabe2d-kube-api-access-r2czk\") pod \"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.125642 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-log-ovn\") pod \"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.125666 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/143d24b9-3b40-4ca9-98ef-43d6effabe2d-additional-scripts\") pod \"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.126430 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/143d24b9-3b40-4ca9-98ef-43d6effabe2d-additional-scripts\") pod \"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.126678 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-log-ovn\") pod \"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.126715 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-run\") pod \"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.126726 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-run-ovn\") pod \"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.127506 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/143d24b9-3b40-4ca9-98ef-43d6effabe2d-scripts\") pod 
\"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.152230 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2czk\" (UniqueName: \"kubernetes.io/projected/143d24b9-3b40-4ca9-98ef-43d6effabe2d-kube-api-access-r2czk\") pod \"ovn-controller-jgthj-config-ntgm8\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.346099 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-hh8w7"] Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.348701 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:28 crc kubenswrapper[4684]: W1013 13:22:28.356096 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2c2b0a0_2d1c_4b25_b223_a0ff24d807fe.slice/crio-14cc67a47df25a905328dc7f245921953677752f77591b1445a31530c4f6ee14 WatchSource:0}: Error finding container 14cc67a47df25a905328dc7f245921953677752f77591b1445a31530c4f6ee14: Status 404 returned error can't find the container with id 14cc67a47df25a905328dc7f245921953677752f77591b1445a31530c4f6ee14 Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.362521 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34825f04-b40a-4274-b682-a33648db56b1" path="/var/lib/kubelet/pods/34825f04-b40a-4274-b682-a33648db56b1/volumes" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.422852 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hh8w7" event={"ID":"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe","Type":"ContainerStarted","Data":"14cc67a47df25a905328dc7f245921953677752f77591b1445a31530c4f6ee14"} Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.474722 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-jgthj" Oct 13 13:22:28 crc kubenswrapper[4684]: I1013 13:22:28.884217 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jgthj-config-ntgm8"] Oct 13 13:22:29 crc kubenswrapper[4684]: I1013 13:22:29.441347 4684 generic.go:334] "Generic (PLEG): container finished" podID="143d24b9-3b40-4ca9-98ef-43d6effabe2d" containerID="1522ca99fe7e67706cece972b62f359904794a816f17d6b7cbaa22929a8b5b02" exitCode=0 Oct 13 13:22:29 crc kubenswrapper[4684]: I1013 13:22:29.441546 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jgthj-config-ntgm8" event={"ID":"143d24b9-3b40-4ca9-98ef-43d6effabe2d","Type":"ContainerDied","Data":"1522ca99fe7e67706cece972b62f359904794a816f17d6b7cbaa22929a8b5b02"} Oct 13 13:22:29 crc kubenswrapper[4684]: I1013 13:22:29.442047 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jgthj-config-ntgm8" event={"ID":"143d24b9-3b40-4ca9-98ef-43d6effabe2d","Type":"ContainerStarted","Data":"d39b37f6cea49cf6e4cc1d59eb63b5809e8c360538bffc8e139b2db41ed58bbe"} Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.560324 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection 
refused" start-of-body= Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.560388 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.725161 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.765846 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2czk\" (UniqueName: \"kubernetes.io/projected/143d24b9-3b40-4ca9-98ef-43d6effabe2d-kube-api-access-r2czk\") pod \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.766017 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-log-ovn\") pod \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.766079 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/143d24b9-3b40-4ca9-98ef-43d6effabe2d-scripts\") pod \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.766165 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-run-ovn\") pod \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.766222 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/143d24b9-3b40-4ca9-98ef-43d6effabe2d-additional-scripts\") pod \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.766270 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-run\") pod \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\" (UID: \"143d24b9-3b40-4ca9-98ef-43d6effabe2d\") " Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.766529 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "143d24b9-3b40-4ca9-98ef-43d6effabe2d" (UID: "143d24b9-3b40-4ca9-98ef-43d6effabe2d"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.766635 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-run" (OuterVolumeSpecName: "var-run") pod "143d24b9-3b40-4ca9-98ef-43d6effabe2d" (UID: "143d24b9-3b40-4ca9-98ef-43d6effabe2d"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.766840 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "143d24b9-3b40-4ca9-98ef-43d6effabe2d" (UID: "143d24b9-3b40-4ca9-98ef-43d6effabe2d"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.767381 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/143d24b9-3b40-4ca9-98ef-43d6effabe2d-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "143d24b9-3b40-4ca9-98ef-43d6effabe2d" (UID: "143d24b9-3b40-4ca9-98ef-43d6effabe2d"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.768014 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/143d24b9-3b40-4ca9-98ef-43d6effabe2d-scripts" (OuterVolumeSpecName: "scripts") pod "143d24b9-3b40-4ca9-98ef-43d6effabe2d" (UID: "143d24b9-3b40-4ca9-98ef-43d6effabe2d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.772586 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/143d24b9-3b40-4ca9-98ef-43d6effabe2d-kube-api-access-r2czk" (OuterVolumeSpecName: "kube-api-access-r2czk") pod "143d24b9-3b40-4ca9-98ef-43d6effabe2d" (UID: "143d24b9-3b40-4ca9-98ef-43d6effabe2d"). InnerVolumeSpecName "kube-api-access-r2czk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.867816 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2czk\" (UniqueName: \"kubernetes.io/projected/143d24b9-3b40-4ca9-98ef-43d6effabe2d-kube-api-access-r2czk\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.867846 4684 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.867855 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/143d24b9-3b40-4ca9-98ef-43d6effabe2d-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.867865 4684 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.867876 4684 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/143d24b9-3b40-4ca9-98ef-43d6effabe2d-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:30 crc kubenswrapper[4684]: I1013 13:22:30.867886 4684 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/143d24b9-3b40-4ca9-98ef-43d6effabe2d-var-run\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:31 crc kubenswrapper[4684]: I1013 13:22:31.457764 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-controller-jgthj-config-ntgm8" event={"ID":"143d24b9-3b40-4ca9-98ef-43d6effabe2d","Type":"ContainerDied","Data":"d39b37f6cea49cf6e4cc1d59eb63b5809e8c360538bffc8e139b2db41ed58bbe"} Oct 13 13:22:31 crc kubenswrapper[4684]: I1013 13:22:31.457801 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d39b37f6cea49cf6e4cc1d59eb63b5809e8c360538bffc8e139b2db41ed58bbe" Oct 13 13:22:31 crc kubenswrapper[4684]: I1013 13:22:31.457818 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jgthj-config-ntgm8" Oct 13 13:22:31 crc kubenswrapper[4684]: I1013 13:22:31.790033 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-jgthj-config-ntgm8"] Oct 13 13:22:31 crc kubenswrapper[4684]: I1013 13:22:31.797043 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-jgthj-config-ntgm8"] Oct 13 13:22:32 crc kubenswrapper[4684]: I1013 13:22:32.293009 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:32 crc kubenswrapper[4684]: I1013 13:22:32.299395 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bca9c4e2-0bbb-4828-bd3a-12c0a75b8946-etc-swift\") pod \"swift-storage-0\" (UID: \"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946\") " pod="openstack/swift-storage-0" Oct 13 13:22:32 crc kubenswrapper[4684]: I1013 13:22:32.370773 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="143d24b9-3b40-4ca9-98ef-43d6effabe2d" path="/var/lib/kubelet/pods/143d24b9-3b40-4ca9-98ef-43d6effabe2d/volumes" Oct 13 13:22:32 crc kubenswrapper[4684]: I1013 13:22:32.490156 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 13 13:22:32 crc kubenswrapper[4684]: I1013 13:22:32.974325 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 13 13:22:33 crc kubenswrapper[4684]: I1013 13:22:33.475465 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946","Type":"ContainerStarted","Data":"c59d08be3e393477c751d80ddecf40e3cf7ae79da01751ad49aa63566df3d407"} Oct 13 13:22:33 crc kubenswrapper[4684]: I1013 13:22:33.910669 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.233449 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-5llnp"] Oct 13 13:22:34 crc kubenswrapper[4684]: E1013 13:22:34.234020 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="143d24b9-3b40-4ca9-98ef-43d6effabe2d" containerName="ovn-config" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.234035 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="143d24b9-3b40-4ca9-98ef-43d6effabe2d" containerName="ovn-config" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.234183 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="143d24b9-3b40-4ca9-98ef-43d6effabe2d" containerName="ovn-config" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.234649 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-5llnp" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.243036 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.251292 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-5llnp"] Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.331017 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pvqr\" (UniqueName: \"kubernetes.io/projected/7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd-kube-api-access-8pvqr\") pod \"cinder-db-create-5llnp\" (UID: \"7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd\") " pod="openstack/cinder-db-create-5llnp" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.340310 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-tg589"] Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.343407 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-tg589" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.394050 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-tg589"] Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.437428 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pvqr\" (UniqueName: \"kubernetes.io/projected/7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd-kube-api-access-8pvqr\") pod \"cinder-db-create-5llnp\" (UID: \"7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd\") " pod="openstack/cinder-db-create-5llnp" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.437502 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jt7zf\" (UniqueName: \"kubernetes.io/projected/269bbce5-67e7-4869-bda7-876636e2faa4-kube-api-access-jt7zf\") pod \"barbican-db-create-tg589\" (UID: \"269bbce5-67e7-4869-bda7-876636e2faa4\") " pod="openstack/barbican-db-create-tg589" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.464778 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pvqr\" (UniqueName: \"kubernetes.io/projected/7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd-kube-api-access-8pvqr\") pod \"cinder-db-create-5llnp\" (UID: \"7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd\") " pod="openstack/cinder-db-create-5llnp" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.489533 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946","Type":"ContainerStarted","Data":"4302877a8615ef32f5f1a062004600de8a20630dc6a21b54bb88e49d7a30d2b2"} Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.489574 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946","Type":"ContainerStarted","Data":"6e09a931399ed1a8ef629f88515c3f7ee47c0b294dff9467423a427d08d29629"} Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.537056 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-mjjht"] Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.538189 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-mjjht" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.538925 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jt7zf\" (UniqueName: \"kubernetes.io/projected/269bbce5-67e7-4869-bda7-876636e2faa4-kube-api-access-jt7zf\") pod \"barbican-db-create-tg589\" (UID: \"269bbce5-67e7-4869-bda7-876636e2faa4\") " pod="openstack/barbican-db-create-tg589" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.552791 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-mjjht"] Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.562111 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-5llnp" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.565799 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jt7zf\" (UniqueName: \"kubernetes.io/projected/269bbce5-67e7-4869-bda7-876636e2faa4-kube-api-access-jt7zf\") pod \"barbican-db-create-tg589\" (UID: \"269bbce5-67e7-4869-bda7-876636e2faa4\") " pod="openstack/barbican-db-create-tg589" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.600035 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-4ddbr"] Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.601319 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4ddbr" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.605437 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-68s85" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.605608 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.605738 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.605867 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.619586 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4ddbr"] Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.642723 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-combined-ca-bundle\") pod \"keystone-db-sync-4ddbr\" (UID: \"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf\") " pod="openstack/keystone-db-sync-4ddbr" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.642831 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7hm7\" (UniqueName: \"kubernetes.io/projected/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-kube-api-access-c7hm7\") pod \"keystone-db-sync-4ddbr\" (UID: \"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf\") " pod="openstack/keystone-db-sync-4ddbr" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.642878 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-config-data\") pod \"keystone-db-sync-4ddbr\" (UID: \"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf\") " pod="openstack/keystone-db-sync-4ddbr" Oct 13 13:22:34 crc 
kubenswrapper[4684]: I1013 13:22:34.643059 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rsmw\" (UniqueName: \"kubernetes.io/projected/8c2ba48c-f56e-4f6e-9992-d52a628c86fb-kube-api-access-5rsmw\") pod \"neutron-db-create-mjjht\" (UID: \"8c2ba48c-f56e-4f6e-9992-d52a628c86fb\") " pod="openstack/neutron-db-create-mjjht" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.667307 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-tg589" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.745356 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-combined-ca-bundle\") pod \"keystone-db-sync-4ddbr\" (UID: \"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf\") " pod="openstack/keystone-db-sync-4ddbr" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.745429 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7hm7\" (UniqueName: \"kubernetes.io/projected/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-kube-api-access-c7hm7\") pod \"keystone-db-sync-4ddbr\" (UID: \"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf\") " pod="openstack/keystone-db-sync-4ddbr" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.745456 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-config-data\") pod \"keystone-db-sync-4ddbr\" (UID: \"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf\") " pod="openstack/keystone-db-sync-4ddbr" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.745482 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rsmw\" (UniqueName: \"kubernetes.io/projected/8c2ba48c-f56e-4f6e-9992-d52a628c86fb-kube-api-access-5rsmw\") pod \"neutron-db-create-mjjht\" (UID: \"8c2ba48c-f56e-4f6e-9992-d52a628c86fb\") " pod="openstack/neutron-db-create-mjjht" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.753747 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-config-data\") pod \"keystone-db-sync-4ddbr\" (UID: \"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf\") " pod="openstack/keystone-db-sync-4ddbr" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.755469 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-combined-ca-bundle\") pod \"keystone-db-sync-4ddbr\" (UID: \"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf\") " pod="openstack/keystone-db-sync-4ddbr" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.761930 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rsmw\" (UniqueName: \"kubernetes.io/projected/8c2ba48c-f56e-4f6e-9992-d52a628c86fb-kube-api-access-5rsmw\") pod \"neutron-db-create-mjjht\" (UID: \"8c2ba48c-f56e-4f6e-9992-d52a628c86fb\") " pod="openstack/neutron-db-create-mjjht" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.771388 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7hm7\" (UniqueName: \"kubernetes.io/projected/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-kube-api-access-c7hm7\") pod \"keystone-db-sync-4ddbr\" (UID: 
\"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf\") " pod="openstack/keystone-db-sync-4ddbr" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.863649 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mjjht" Oct 13 13:22:34 crc kubenswrapper[4684]: I1013 13:22:34.945821 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4ddbr" Oct 13 13:22:35 crc kubenswrapper[4684]: I1013 13:22:35.123390 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-5llnp"] Oct 13 13:22:35 crc kubenswrapper[4684]: I1013 13:22:35.191768 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-tg589"] Oct 13 13:22:35 crc kubenswrapper[4684]: W1013 13:22:35.207152 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod269bbce5_67e7_4869_bda7_876636e2faa4.slice/crio-62c215e100970a36ea93959caaf86cfaac117c1c98083717f20772050bb1d1ea WatchSource:0}: Error finding container 62c215e100970a36ea93959caaf86cfaac117c1c98083717f20772050bb1d1ea: Status 404 returned error can't find the container with id 62c215e100970a36ea93959caaf86cfaac117c1c98083717f20772050bb1d1ea Oct 13 13:22:35 crc kubenswrapper[4684]: I1013 13:22:35.382042 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-mjjht"] Oct 13 13:22:35 crc kubenswrapper[4684]: I1013 13:22:35.462446 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4ddbr"] Oct 13 13:22:35 crc kubenswrapper[4684]: I1013 13:22:35.498000 4684 generic.go:334] "Generic (PLEG): container finished" podID="7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd" containerID="f00b3382948736f20cefdae7267497dd4ed2c0cf4a454c2b1429ed9b1ac3eebc" exitCode=0 Oct 13 13:22:35 crc kubenswrapper[4684]: I1013 13:22:35.498468 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-5llnp" event={"ID":"7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd","Type":"ContainerDied","Data":"f00b3382948736f20cefdae7267497dd4ed2c0cf4a454c2b1429ed9b1ac3eebc"} Oct 13 13:22:35 crc kubenswrapper[4684]: I1013 13:22:35.498506 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-5llnp" event={"ID":"7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd","Type":"ContainerStarted","Data":"adfbb3a267b0126edab5e5267dfe05c790f3867f1354e8c129407853e5370244"} Oct 13 13:22:35 crc kubenswrapper[4684]: I1013 13:22:35.502122 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946","Type":"ContainerStarted","Data":"1e2f633ffc1d46a267af58f31a1227e0c606c2c03d5f8ac9c041d3d31c6b00d4"} Oct 13 13:22:35 crc kubenswrapper[4684]: I1013 13:22:35.502164 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946","Type":"ContainerStarted","Data":"67a5ec83da80fc95abd5ff98e022ed4d1732eaaa370326a6b0797c7456bfa094"} Oct 13 13:22:35 crc kubenswrapper[4684]: I1013 13:22:35.503582 4684 generic.go:334] "Generic (PLEG): container finished" podID="269bbce5-67e7-4869-bda7-876636e2faa4" containerID="398fa3eb9c985fef6389eca9c50733e32393905d74dd4d4bd80254b36dfb97ae" exitCode=0 Oct 13 13:22:35 crc kubenswrapper[4684]: I1013 13:22:35.503628 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-tg589" 
event={"ID":"269bbce5-67e7-4869-bda7-876636e2faa4","Type":"ContainerDied","Data":"398fa3eb9c985fef6389eca9c50733e32393905d74dd4d4bd80254b36dfb97ae"} Oct 13 13:22:35 crc kubenswrapper[4684]: I1013 13:22:35.503658 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-tg589" event={"ID":"269bbce5-67e7-4869-bda7-876636e2faa4","Type":"ContainerStarted","Data":"62c215e100970a36ea93959caaf86cfaac117c1c98083717f20772050bb1d1ea"} Oct 13 13:22:35 crc kubenswrapper[4684]: W1013 13:22:35.674049 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod28fe7aea_16bf_46f3_b8b0_6b322c4ffecf.slice/crio-e568ef12448ecd6481a6339284727d3d1276e46e84b32bbd12091f6fbae52a14 WatchSource:0}: Error finding container e568ef12448ecd6481a6339284727d3d1276e46e84b32bbd12091f6fbae52a14: Status 404 returned error can't find the container with id e568ef12448ecd6481a6339284727d3d1276e46e84b32bbd12091f6fbae52a14 Oct 13 13:22:35 crc kubenswrapper[4684]: W1013 13:22:35.676722 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c2ba48c_f56e_4f6e_9992_d52a628c86fb.slice/crio-f558628f6664704f83dc86a256dbbdb86fca1da28e1487d0904fcbc6e81dc335 WatchSource:0}: Error finding container f558628f6664704f83dc86a256dbbdb86fca1da28e1487d0904fcbc6e81dc335: Status 404 returned error can't find the container with id f558628f6664704f83dc86a256dbbdb86fca1da28e1487d0904fcbc6e81dc335 Oct 13 13:22:36 crc kubenswrapper[4684]: I1013 13:22:36.581602 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946","Type":"ContainerStarted","Data":"fdb682b63ce197a8ba3b83447372cefd02f2ba18c2be390f5d5f0933c65e7e55"} Oct 13 13:22:36 crc kubenswrapper[4684]: I1013 13:22:36.582069 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946","Type":"ContainerStarted","Data":"7aa799303374f5ee30589c667669faa58da24795bca8c40364c50b6864fd0924"} Oct 13 13:22:36 crc kubenswrapper[4684]: I1013 13:22:36.582080 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946","Type":"ContainerStarted","Data":"1b7ccf214319d4a41be5050ad80538a9afd0d940dff5318289977fccdb3cf98d"} Oct 13 13:22:36 crc kubenswrapper[4684]: I1013 13:22:36.584214 4684 generic.go:334] "Generic (PLEG): container finished" podID="8c2ba48c-f56e-4f6e-9992-d52a628c86fb" containerID="a9444f290b3db177eaa26302c17d6046b7b9cae9be686bb47ca1c3a3b11b7111" exitCode=0 Oct 13 13:22:36 crc kubenswrapper[4684]: I1013 13:22:36.584274 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mjjht" event={"ID":"8c2ba48c-f56e-4f6e-9992-d52a628c86fb","Type":"ContainerDied","Data":"a9444f290b3db177eaa26302c17d6046b7b9cae9be686bb47ca1c3a3b11b7111"} Oct 13 13:22:36 crc kubenswrapper[4684]: I1013 13:22:36.584296 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mjjht" event={"ID":"8c2ba48c-f56e-4f6e-9992-d52a628c86fb","Type":"ContainerStarted","Data":"f558628f6664704f83dc86a256dbbdb86fca1da28e1487d0904fcbc6e81dc335"} Oct 13 13:22:36 crc kubenswrapper[4684]: I1013 13:22:36.587521 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4ddbr" 
event={"ID":"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf","Type":"ContainerStarted","Data":"e568ef12448ecd6481a6339284727d3d1276e46e84b32bbd12091f6fbae52a14"} Oct 13 13:22:37 crc kubenswrapper[4684]: I1013 13:22:37.058858 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-tg589" Oct 13 13:22:37 crc kubenswrapper[4684]: I1013 13:22:37.060576 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-5llnp" Oct 13 13:22:37 crc kubenswrapper[4684]: I1013 13:22:37.087313 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jt7zf\" (UniqueName: \"kubernetes.io/projected/269bbce5-67e7-4869-bda7-876636e2faa4-kube-api-access-jt7zf\") pod \"269bbce5-67e7-4869-bda7-876636e2faa4\" (UID: \"269bbce5-67e7-4869-bda7-876636e2faa4\") " Oct 13 13:22:37 crc kubenswrapper[4684]: I1013 13:22:37.088087 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8pvqr\" (UniqueName: \"kubernetes.io/projected/7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd-kube-api-access-8pvqr\") pod \"7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd\" (UID: \"7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd\") " Oct 13 13:22:37 crc kubenswrapper[4684]: I1013 13:22:37.097355 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd-kube-api-access-8pvqr" (OuterVolumeSpecName: "kube-api-access-8pvqr") pod "7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd" (UID: "7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd"). InnerVolumeSpecName "kube-api-access-8pvqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:37 crc kubenswrapper[4684]: I1013 13:22:37.100199 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/269bbce5-67e7-4869-bda7-876636e2faa4-kube-api-access-jt7zf" (OuterVolumeSpecName: "kube-api-access-jt7zf") pod "269bbce5-67e7-4869-bda7-876636e2faa4" (UID: "269bbce5-67e7-4869-bda7-876636e2faa4"). InnerVolumeSpecName "kube-api-access-jt7zf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:37 crc kubenswrapper[4684]: I1013 13:22:37.189422 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jt7zf\" (UniqueName: \"kubernetes.io/projected/269bbce5-67e7-4869-bda7-876636e2faa4-kube-api-access-jt7zf\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:37 crc kubenswrapper[4684]: I1013 13:22:37.189451 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8pvqr\" (UniqueName: \"kubernetes.io/projected/7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd-kube-api-access-8pvqr\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:37 crc kubenswrapper[4684]: I1013 13:22:37.650615 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-5llnp" event={"ID":"7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd","Type":"ContainerDied","Data":"adfbb3a267b0126edab5e5267dfe05c790f3867f1354e8c129407853e5370244"} Oct 13 13:22:37 crc kubenswrapper[4684]: I1013 13:22:37.650667 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="adfbb3a267b0126edab5e5267dfe05c790f3867f1354e8c129407853e5370244" Oct 13 13:22:37 crc kubenswrapper[4684]: I1013 13:22:37.650742 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-5llnp" Oct 13 13:22:37 crc kubenswrapper[4684]: I1013 13:22:37.715006 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946","Type":"ContainerStarted","Data":"ac847774a5825d6abee93341fda5007b8e873015bb8cf5a79cc64258032e8ada"} Oct 13 13:22:37 crc kubenswrapper[4684]: I1013 13:22:37.735310 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-tg589" Oct 13 13:22:37 crc kubenswrapper[4684]: I1013 13:22:37.736136 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-tg589" event={"ID":"269bbce5-67e7-4869-bda7-876636e2faa4","Type":"ContainerDied","Data":"62c215e100970a36ea93959caaf86cfaac117c1c98083717f20772050bb1d1ea"} Oct 13 13:22:37 crc kubenswrapper[4684]: I1013 13:22:37.736242 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="62c215e100970a36ea93959caaf86cfaac117c1c98083717f20772050bb1d1ea" Oct 13 13:22:38 crc kubenswrapper[4684]: I1013 13:22:38.072235 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mjjht" Oct 13 13:22:38 crc kubenswrapper[4684]: I1013 13:22:38.106499 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rsmw\" (UniqueName: \"kubernetes.io/projected/8c2ba48c-f56e-4f6e-9992-d52a628c86fb-kube-api-access-5rsmw\") pod \"8c2ba48c-f56e-4f6e-9992-d52a628c86fb\" (UID: \"8c2ba48c-f56e-4f6e-9992-d52a628c86fb\") " Oct 13 13:22:38 crc kubenswrapper[4684]: I1013 13:22:38.120342 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c2ba48c-f56e-4f6e-9992-d52a628c86fb-kube-api-access-5rsmw" (OuterVolumeSpecName: "kube-api-access-5rsmw") pod "8c2ba48c-f56e-4f6e-9992-d52a628c86fb" (UID: "8c2ba48c-f56e-4f6e-9992-d52a628c86fb"). InnerVolumeSpecName "kube-api-access-5rsmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:38 crc kubenswrapper[4684]: I1013 13:22:38.215710 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rsmw\" (UniqueName: \"kubernetes.io/projected/8c2ba48c-f56e-4f6e-9992-d52a628c86fb-kube-api-access-5rsmw\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:38 crc kubenswrapper[4684]: I1013 13:22:38.745175 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-mjjht" Oct 13 13:22:38 crc kubenswrapper[4684]: I1013 13:22:38.745163 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mjjht" event={"ID":"8c2ba48c-f56e-4f6e-9992-d52a628c86fb","Type":"ContainerDied","Data":"f558628f6664704f83dc86a256dbbdb86fca1da28e1487d0904fcbc6e81dc335"} Oct 13 13:22:38 crc kubenswrapper[4684]: I1013 13:22:38.745331 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f558628f6664704f83dc86a256dbbdb86fca1da28e1487d0904fcbc6e81dc335" Oct 13 13:22:38 crc kubenswrapper[4684]: I1013 13:22:38.751706 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946","Type":"ContainerStarted","Data":"2c859798a0fc9abf65ac9a108f376b7b0453ec9aef9b9b5528a3319993a83596"} Oct 13 13:22:38 crc kubenswrapper[4684]: I1013 13:22:38.751749 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946","Type":"ContainerStarted","Data":"a4a2bb0828cab2dfaea74d7c8cdcbdf7d28f5d54eb3983eae1bce178391cfb8d"} Oct 13 13:22:38 crc kubenswrapper[4684]: I1013 13:22:38.751762 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946","Type":"ContainerStarted","Data":"77021bb53e2c43d52944b682027438c03212c45affce8db42e83027a1709c9bc"} Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.403826 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-5b32-account-create-czfqk"] Oct 13 13:22:44 crc kubenswrapper[4684]: E1013 13:22:44.404928 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd" containerName="mariadb-database-create" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.404945 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd" containerName="mariadb-database-create" Oct 13 13:22:44 crc kubenswrapper[4684]: E1013 13:22:44.404959 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="269bbce5-67e7-4869-bda7-876636e2faa4" containerName="mariadb-database-create" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.404966 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="269bbce5-67e7-4869-bda7-876636e2faa4" containerName="mariadb-database-create" Oct 13 13:22:44 crc kubenswrapper[4684]: E1013 13:22:44.405003 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c2ba48c-f56e-4f6e-9992-d52a628c86fb" containerName="mariadb-database-create" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.405011 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c2ba48c-f56e-4f6e-9992-d52a628c86fb" containerName="mariadb-database-create" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.405222 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="269bbce5-67e7-4869-bda7-876636e2faa4" containerName="mariadb-database-create" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.405259 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c2ba48c-f56e-4f6e-9992-d52a628c86fb" containerName="mariadb-database-create" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.405281 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd" containerName="mariadb-database-create" Oct 13 13:22:44 crc 
kubenswrapper[4684]: I1013 13:22:44.405950 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-5b32-account-create-czfqk" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.408700 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.410959 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-5b32-account-create-czfqk"] Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.474359 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-1da3-account-create-tczs9"] Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.475493 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1da3-account-create-tczs9" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.478537 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.483476 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1da3-account-create-tczs9"] Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.519261 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw5gz\" (UniqueName: \"kubernetes.io/projected/41c878ea-21d2-4f4d-9628-d7bb8a46c1e0-kube-api-access-cw5gz\") pod \"barbican-5b32-account-create-czfqk\" (UID: \"41c878ea-21d2-4f4d-9628-d7bb8a46c1e0\") " pod="openstack/barbican-5b32-account-create-czfqk" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.519309 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvhbn\" (UniqueName: \"kubernetes.io/projected/491293d3-586b-4691-91fd-e1c6e51144bb-kube-api-access-vvhbn\") pod \"cinder-1da3-account-create-tczs9\" (UID: \"491293d3-586b-4691-91fd-e1c6e51144bb\") " pod="openstack/cinder-1da3-account-create-tczs9" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.621099 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw5gz\" (UniqueName: \"kubernetes.io/projected/41c878ea-21d2-4f4d-9628-d7bb8a46c1e0-kube-api-access-cw5gz\") pod \"barbican-5b32-account-create-czfqk\" (UID: \"41c878ea-21d2-4f4d-9628-d7bb8a46c1e0\") " pod="openstack/barbican-5b32-account-create-czfqk" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.621147 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvhbn\" (UniqueName: \"kubernetes.io/projected/491293d3-586b-4691-91fd-e1c6e51144bb-kube-api-access-vvhbn\") pod \"cinder-1da3-account-create-tczs9\" (UID: \"491293d3-586b-4691-91fd-e1c6e51144bb\") " pod="openstack/cinder-1da3-account-create-tczs9" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.639229 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cw5gz\" (UniqueName: \"kubernetes.io/projected/41c878ea-21d2-4f4d-9628-d7bb8a46c1e0-kube-api-access-cw5gz\") pod \"barbican-5b32-account-create-czfqk\" (UID: \"41c878ea-21d2-4f4d-9628-d7bb8a46c1e0\") " pod="openstack/barbican-5b32-account-create-czfqk" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.639459 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvhbn\" (UniqueName: \"kubernetes.io/projected/491293d3-586b-4691-91fd-e1c6e51144bb-kube-api-access-vvhbn\") pod 
\"cinder-1da3-account-create-tczs9\" (UID: \"491293d3-586b-4691-91fd-e1c6e51144bb\") " pod="openstack/cinder-1da3-account-create-tczs9" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.724509 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-5b32-account-create-czfqk" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.775043 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6f5e-account-create-8zhf2"] Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.776214 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6f5e-account-create-8zhf2" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.782148 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.795757 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6f5e-account-create-8zhf2"] Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.795832 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1da3-account-create-tczs9" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.825397 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glbfb\" (UniqueName: \"kubernetes.io/projected/09785215-874e-4d3f-b431-4d4df98e31c8-kube-api-access-glbfb\") pod \"neutron-6f5e-account-create-8zhf2\" (UID: \"09785215-874e-4d3f-b431-4d4df98e31c8\") " pod="openstack/neutron-6f5e-account-create-8zhf2" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.927277 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glbfb\" (UniqueName: \"kubernetes.io/projected/09785215-874e-4d3f-b431-4d4df98e31c8-kube-api-access-glbfb\") pod \"neutron-6f5e-account-create-8zhf2\" (UID: \"09785215-874e-4d3f-b431-4d4df98e31c8\") " pod="openstack/neutron-6f5e-account-create-8zhf2" Oct 13 13:22:44 crc kubenswrapper[4684]: I1013 13:22:44.948344 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glbfb\" (UniqueName: \"kubernetes.io/projected/09785215-874e-4d3f-b431-4d4df98e31c8-kube-api-access-glbfb\") pod \"neutron-6f5e-account-create-8zhf2\" (UID: \"09785215-874e-4d3f-b431-4d4df98e31c8\") " pod="openstack/neutron-6f5e-account-create-8zhf2" Oct 13 13:22:45 crc kubenswrapper[4684]: I1013 13:22:45.098165 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6f5e-account-create-8zhf2" Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.147080 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6f5e-account-create-8zhf2"] Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.621705 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1da3-account-create-tczs9"] Oct 13 13:22:47 crc kubenswrapper[4684]: W1013 13:22:47.629552 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod491293d3_586b_4691_91fd_e1c6e51144bb.slice/crio-81f883580c925ac8f80b53e48b49b3f8a9e3804644219461cb61239c27b95ae9 WatchSource:0}: Error finding container 81f883580c925ac8f80b53e48b49b3f8a9e3804644219461cb61239c27b95ae9: Status 404 returned error can't find the container with id 81f883580c925ac8f80b53e48b49b3f8a9e3804644219461cb61239c27b95ae9 Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.633277 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-5b32-account-create-czfqk"] Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.828564 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-5b32-account-create-czfqk" event={"ID":"41c878ea-21d2-4f4d-9628-d7bb8a46c1e0","Type":"ContainerStarted","Data":"865f5a1f542a780d49253d3d892e774ef646b7cb9fa523018df35f3320a727b2"} Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.830587 4684 generic.go:334] "Generic (PLEG): container finished" podID="09785215-874e-4d3f-b431-4d4df98e31c8" containerID="b46416504f1b586dc2bb73f9658247786c100b6ec2e7f35fd88ac238039e50f5" exitCode=0 Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.830690 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f5e-account-create-8zhf2" event={"ID":"09785215-874e-4d3f-b431-4d4df98e31c8","Type":"ContainerDied","Data":"b46416504f1b586dc2bb73f9658247786c100b6ec2e7f35fd88ac238039e50f5"} Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.830736 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f5e-account-create-8zhf2" event={"ID":"09785215-874e-4d3f-b431-4d4df98e31c8","Type":"ContainerStarted","Data":"a2547098f378a1c8945ccd2508f5e290bdcddf805c3eac710b01c9758abd09ba"} Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.832853 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1da3-account-create-tczs9" event={"ID":"491293d3-586b-4691-91fd-e1c6e51144bb","Type":"ContainerStarted","Data":"81f883580c925ac8f80b53e48b49b3f8a9e3804644219461cb61239c27b95ae9"} Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.835544 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hh8w7" event={"ID":"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe","Type":"ContainerStarted","Data":"f9469b06a0908d38711c53628bf743fb0c21a42e676091318f92f6db34e196c8"} Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.837889 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4ddbr" event={"ID":"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf","Type":"ContainerStarted","Data":"35c7e4e5887a719c3beddb46adb9bfc2847e91479b1f2a6ac878efe464b2b6df"} Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.851447 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946","Type":"ContainerStarted","Data":"bc3ff802b8a20b408c3f56eb3df6d473ab2c56aee1c65c9a78bbfcbc6817bea8"} Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.851497 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946","Type":"ContainerStarted","Data":"5ab1a0b791fc03eac901490b7ce47af84ede4cd66d9e4f3ee6ab0dc9e24cb8a3"} Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.851510 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946","Type":"ContainerStarted","Data":"21d8e495b91e46c13f5e16e60845cd481346a26827aa2b281da5c6a059febf1b"} Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.851519 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bca9c4e2-0bbb-4828-bd3a-12c0a75b8946","Type":"ContainerStarted","Data":"35bf1a37bcf46a822bb6ee418db66bf72ae07657ccff82dc896f1bae13335785"} Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.866872 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-4ddbr" podStartSLOduration=2.884066967 podStartE2EDuration="13.866851658s" podCreationTimestamp="2025-10-13 13:22:34 +0000 UTC" firstStartedPulling="2025-10-13 13:22:35.676678652 +0000 UTC m=+910.244062722" lastFinishedPulling="2025-10-13 13:22:46.659463343 +0000 UTC m=+921.226847413" observedRunningTime="2025-10-13 13:22:47.866662782 +0000 UTC m=+922.434046872" watchObservedRunningTime="2025-10-13 13:22:47.866851658 +0000 UTC m=+922.434235728" Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.884844 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-hh8w7" podStartSLOduration=2.5706353650000002 podStartE2EDuration="20.884819551s" podCreationTimestamp="2025-10-13 13:22:27 +0000 UTC" firstStartedPulling="2025-10-13 13:22:28.36795379 +0000 UTC m=+902.935337860" lastFinishedPulling="2025-10-13 13:22:46.682137966 +0000 UTC m=+921.249522046" observedRunningTime="2025-10-13 13:22:47.88101904 +0000 UTC m=+922.448403130" watchObservedRunningTime="2025-10-13 13:22:47.884819551 +0000 UTC m=+922.452203631" Oct 13 13:22:47 crc kubenswrapper[4684]: I1013 13:22:47.926082 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=44.418394933 podStartE2EDuration="48.926059087s" podCreationTimestamp="2025-10-13 13:21:59 +0000 UTC" firstStartedPulling="2025-10-13 13:22:32.978301344 +0000 UTC m=+907.545685424" lastFinishedPulling="2025-10-13 13:22:37.485965508 +0000 UTC m=+912.053349578" observedRunningTime="2025-10-13 13:22:47.914077914 +0000 UTC m=+922.481462004" watchObservedRunningTime="2025-10-13 13:22:47.926059087 +0000 UTC m=+922.493443167" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.213760 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b9cc7477c-cd9wz"] Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.217122 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b9cc7477c-cd9wz"] Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.217294 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.221295 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.285029 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7ngr\" (UniqueName: \"kubernetes.io/projected/35d92a7c-488a-4989-b408-9353d77f0416-kube-api-access-n7ngr\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.285090 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-config\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.285114 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-dns-svc\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.285134 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-dns-swift-storage-0\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.285161 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-ovsdbserver-nb\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.285202 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-ovsdbserver-sb\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.386617 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7ngr\" (UniqueName: \"kubernetes.io/projected/35d92a7c-488a-4989-b408-9353d77f0416-kube-api-access-n7ngr\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.386709 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-config\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.386749 4684 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-dns-svc\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.386773 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-dns-swift-storage-0\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.386802 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-ovsdbserver-nb\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.386846 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-ovsdbserver-sb\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.387636 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-config\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.388537 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-dns-swift-storage-0\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.388652 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-ovsdbserver-sb\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.388932 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-ovsdbserver-nb\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.389008 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-dns-svc\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.414078 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7ngr\" (UniqueName: 
\"kubernetes.io/projected/35d92a7c-488a-4989-b408-9353d77f0416-kube-api-access-n7ngr\") pod \"dnsmasq-dns-6b9cc7477c-cd9wz\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.558097 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.861499 4684 generic.go:334] "Generic (PLEG): container finished" podID="41c878ea-21d2-4f4d-9628-d7bb8a46c1e0" containerID="feb471e2132c1c04f1860174ba3ca61ca7f8301e1c27f216194bd0e88736e020" exitCode=0 Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.861591 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-5b32-account-create-czfqk" event={"ID":"41c878ea-21d2-4f4d-9628-d7bb8a46c1e0","Type":"ContainerDied","Data":"feb471e2132c1c04f1860174ba3ca61ca7f8301e1c27f216194bd0e88736e020"} Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.863892 4684 generic.go:334] "Generic (PLEG): container finished" podID="491293d3-586b-4691-91fd-e1c6e51144bb" containerID="78ef1075f0458c623839fab524432d158f3df5c137f0212d7563108e2dca1f9c" exitCode=0 Oct 13 13:22:48 crc kubenswrapper[4684]: I1013 13:22:48.863966 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1da3-account-create-tczs9" event={"ID":"491293d3-586b-4691-91fd-e1c6e51144bb","Type":"ContainerDied","Data":"78ef1075f0458c623839fab524432d158f3df5c137f0212d7563108e2dca1f9c"} Oct 13 13:22:49 crc kubenswrapper[4684]: I1013 13:22:49.009800 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b9cc7477c-cd9wz"] Oct 13 13:22:49 crc kubenswrapper[4684]: I1013 13:22:49.222296 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6f5e-account-create-8zhf2" Oct 13 13:22:49 crc kubenswrapper[4684]: I1013 13:22:49.402956 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glbfb\" (UniqueName: \"kubernetes.io/projected/09785215-874e-4d3f-b431-4d4df98e31c8-kube-api-access-glbfb\") pod \"09785215-874e-4d3f-b431-4d4df98e31c8\" (UID: \"09785215-874e-4d3f-b431-4d4df98e31c8\") " Oct 13 13:22:49 crc kubenswrapper[4684]: I1013 13:22:49.406415 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09785215-874e-4d3f-b431-4d4df98e31c8-kube-api-access-glbfb" (OuterVolumeSpecName: "kube-api-access-glbfb") pod "09785215-874e-4d3f-b431-4d4df98e31c8" (UID: "09785215-874e-4d3f-b431-4d4df98e31c8"). InnerVolumeSpecName "kube-api-access-glbfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:49 crc kubenswrapper[4684]: I1013 13:22:49.505040 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glbfb\" (UniqueName: \"kubernetes.io/projected/09785215-874e-4d3f-b431-4d4df98e31c8-kube-api-access-glbfb\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:49 crc kubenswrapper[4684]: I1013 13:22:49.887541 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6f5e-account-create-8zhf2" Oct 13 13:22:49 crc kubenswrapper[4684]: I1013 13:22:49.887611 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f5e-account-create-8zhf2" event={"ID":"09785215-874e-4d3f-b431-4d4df98e31c8","Type":"ContainerDied","Data":"a2547098f378a1c8945ccd2508f5e290bdcddf805c3eac710b01c9758abd09ba"} Oct 13 13:22:49 crc kubenswrapper[4684]: I1013 13:22:49.887686 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2547098f378a1c8945ccd2508f5e290bdcddf805c3eac710b01c9758abd09ba" Oct 13 13:22:49 crc kubenswrapper[4684]: I1013 13:22:49.891387 4684 generic.go:334] "Generic (PLEG): container finished" podID="35d92a7c-488a-4989-b408-9353d77f0416" containerID="e8b7de5b7869e1006004976315a31d2abd0e468b378c8c09baa6a1cfdc44a2ff" exitCode=0 Oct 13 13:22:49 crc kubenswrapper[4684]: I1013 13:22:49.891511 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" event={"ID":"35d92a7c-488a-4989-b408-9353d77f0416","Type":"ContainerDied","Data":"e8b7de5b7869e1006004976315a31d2abd0e468b378c8c09baa6a1cfdc44a2ff"} Oct 13 13:22:49 crc kubenswrapper[4684]: I1013 13:22:49.891581 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" event={"ID":"35d92a7c-488a-4989-b408-9353d77f0416","Type":"ContainerStarted","Data":"ad0e35befe355b7ea74bd8deae3d86cec87782a996d451793f36bb4ccb0f6bf2"} Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.253012 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-5b32-account-create-czfqk" Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.257812 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1da3-account-create-tczs9" Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.424652 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvhbn\" (UniqueName: \"kubernetes.io/projected/491293d3-586b-4691-91fd-e1c6e51144bb-kube-api-access-vvhbn\") pod \"491293d3-586b-4691-91fd-e1c6e51144bb\" (UID: \"491293d3-586b-4691-91fd-e1c6e51144bb\") " Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.424710 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cw5gz\" (UniqueName: \"kubernetes.io/projected/41c878ea-21d2-4f4d-9628-d7bb8a46c1e0-kube-api-access-cw5gz\") pod \"41c878ea-21d2-4f4d-9628-d7bb8a46c1e0\" (UID: \"41c878ea-21d2-4f4d-9628-d7bb8a46c1e0\") " Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.430981 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/491293d3-586b-4691-91fd-e1c6e51144bb-kube-api-access-vvhbn" (OuterVolumeSpecName: "kube-api-access-vvhbn") pod "491293d3-586b-4691-91fd-e1c6e51144bb" (UID: "491293d3-586b-4691-91fd-e1c6e51144bb"). InnerVolumeSpecName "kube-api-access-vvhbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.431312 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41c878ea-21d2-4f4d-9628-d7bb8a46c1e0-kube-api-access-cw5gz" (OuterVolumeSpecName: "kube-api-access-cw5gz") pod "41c878ea-21d2-4f4d-9628-d7bb8a46c1e0" (UID: "41c878ea-21d2-4f4d-9628-d7bb8a46c1e0"). InnerVolumeSpecName "kube-api-access-cw5gz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.527057 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvhbn\" (UniqueName: \"kubernetes.io/projected/491293d3-586b-4691-91fd-e1c6e51144bb-kube-api-access-vvhbn\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.527108 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cw5gz\" (UniqueName: \"kubernetes.io/projected/41c878ea-21d2-4f4d-9628-d7bb8a46c1e0-kube-api-access-cw5gz\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.902675 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" event={"ID":"35d92a7c-488a-4989-b408-9353d77f0416","Type":"ContainerStarted","Data":"e642881a3a8ca1a3c5f4ceb5bd7ef3fe8d5340ddaf44c53ecd5b97a6c4e5ee98"} Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.902795 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.905506 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-5b32-account-create-czfqk" event={"ID":"41c878ea-21d2-4f4d-9628-d7bb8a46c1e0","Type":"ContainerDied","Data":"865f5a1f542a780d49253d3d892e774ef646b7cb9fa523018df35f3320a727b2"} Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.905554 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="865f5a1f542a780d49253d3d892e774ef646b7cb9fa523018df35f3320a727b2" Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.905614 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-5b32-account-create-czfqk" Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.907247 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1da3-account-create-tczs9" event={"ID":"491293d3-586b-4691-91fd-e1c6e51144bb","Type":"ContainerDied","Data":"81f883580c925ac8f80b53e48b49b3f8a9e3804644219461cb61239c27b95ae9"} Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.907274 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81f883580c925ac8f80b53e48b49b3f8a9e3804644219461cb61239c27b95ae9" Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.907320 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-1da3-account-create-tczs9" Oct 13 13:22:50 crc kubenswrapper[4684]: I1013 13:22:50.937778 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" podStartSLOduration=2.937752248 podStartE2EDuration="2.937752248s" podCreationTimestamp="2025-10-13 13:22:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:22:50.92529221 +0000 UTC m=+925.492676320" watchObservedRunningTime="2025-10-13 13:22:50.937752248 +0000 UTC m=+925.505136348" Oct 13 13:22:51 crc kubenswrapper[4684]: I1013 13:22:51.917036 4684 generic.go:334] "Generic (PLEG): container finished" podID="28fe7aea-16bf-46f3-b8b0-6b322c4ffecf" containerID="35c7e4e5887a719c3beddb46adb9bfc2847e91479b1f2a6ac878efe464b2b6df" exitCode=0 Oct 13 13:22:51 crc kubenswrapper[4684]: I1013 13:22:51.917140 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4ddbr" event={"ID":"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf","Type":"ContainerDied","Data":"35c7e4e5887a719c3beddb46adb9bfc2847e91479b1f2a6ac878efe464b2b6df"} Oct 13 13:22:53 crc kubenswrapper[4684]: I1013 13:22:53.275349 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4ddbr" Oct 13 13:22:53 crc kubenswrapper[4684]: I1013 13:22:53.381044 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7hm7\" (UniqueName: \"kubernetes.io/projected/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-kube-api-access-c7hm7\") pod \"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf\" (UID: \"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf\") " Oct 13 13:22:53 crc kubenswrapper[4684]: I1013 13:22:53.381359 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-combined-ca-bundle\") pod \"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf\" (UID: \"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf\") " Oct 13 13:22:53 crc kubenswrapper[4684]: I1013 13:22:53.381638 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-config-data\") pod \"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf\" (UID: \"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf\") " Oct 13 13:22:53 crc kubenswrapper[4684]: I1013 13:22:53.389221 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-kube-api-access-c7hm7" (OuterVolumeSpecName: "kube-api-access-c7hm7") pod "28fe7aea-16bf-46f3-b8b0-6b322c4ffecf" (UID: "28fe7aea-16bf-46f3-b8b0-6b322c4ffecf"). InnerVolumeSpecName "kube-api-access-c7hm7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:53 crc kubenswrapper[4684]: I1013 13:22:53.424250 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "28fe7aea-16bf-46f3-b8b0-6b322c4ffecf" (UID: "28fe7aea-16bf-46f3-b8b0-6b322c4ffecf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:22:53 crc kubenswrapper[4684]: I1013 13:22:53.440109 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-config-data" (OuterVolumeSpecName: "config-data") pod "28fe7aea-16bf-46f3-b8b0-6b322c4ffecf" (UID: "28fe7aea-16bf-46f3-b8b0-6b322c4ffecf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:22:53 crc kubenswrapper[4684]: I1013 13:22:53.483606 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:53 crc kubenswrapper[4684]: I1013 13:22:53.483638 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7hm7\" (UniqueName: \"kubernetes.io/projected/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-kube-api-access-c7hm7\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:53 crc kubenswrapper[4684]: I1013 13:22:53.483653 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:53 crc kubenswrapper[4684]: I1013 13:22:53.934530 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4ddbr" event={"ID":"28fe7aea-16bf-46f3-b8b0-6b322c4ffecf","Type":"ContainerDied","Data":"e568ef12448ecd6481a6339284727d3d1276e46e84b32bbd12091f6fbae52a14"} Oct 13 13:22:53 crc kubenswrapper[4684]: I1013 13:22:53.935203 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e568ef12448ecd6481a6339284727d3d1276e46e84b32bbd12091f6fbae52a14" Oct 13 13:22:53 crc kubenswrapper[4684]: I1013 13:22:53.934543 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4ddbr" Oct 13 13:22:53 crc kubenswrapper[4684]: I1013 13:22:53.936741 4684 generic.go:334] "Generic (PLEG): container finished" podID="f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe" containerID="f9469b06a0908d38711c53628bf743fb0c21a42e676091318f92f6db34e196c8" exitCode=0 Oct 13 13:22:53 crc kubenswrapper[4684]: I1013 13:22:53.937128 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hh8w7" event={"ID":"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe","Type":"ContainerDied","Data":"f9469b06a0908d38711c53628bf743fb0c21a42e676091318f92f6db34e196c8"} Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.111950 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b9cc7477c-cd9wz"] Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.112231 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" podUID="35d92a7c-488a-4989-b408-9353d77f0416" containerName="dnsmasq-dns" containerID="cri-o://e642881a3a8ca1a3c5f4ceb5bd7ef3fe8d5340ddaf44c53ecd5b97a6c4e5ee98" gracePeriod=10 Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.126573 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-hrh8g"] Oct 13 13:22:54 crc kubenswrapper[4684]: E1013 13:22:54.132074 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41c878ea-21d2-4f4d-9628-d7bb8a46c1e0" containerName="mariadb-account-create" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.132102 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="41c878ea-21d2-4f4d-9628-d7bb8a46c1e0" containerName="mariadb-account-create" Oct 13 13:22:54 crc kubenswrapper[4684]: E1013 13:22:54.132132 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="491293d3-586b-4691-91fd-e1c6e51144bb" containerName="mariadb-account-create" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.132138 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="491293d3-586b-4691-91fd-e1c6e51144bb" containerName="mariadb-account-create" Oct 13 13:22:54 crc kubenswrapper[4684]: E1013 13:22:54.132150 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09785215-874e-4d3f-b431-4d4df98e31c8" containerName="mariadb-account-create" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.132163 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="09785215-874e-4d3f-b431-4d4df98e31c8" containerName="mariadb-account-create" Oct 13 13:22:54 crc kubenswrapper[4684]: E1013 13:22:54.132171 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28fe7aea-16bf-46f3-b8b0-6b322c4ffecf" containerName="keystone-db-sync" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.132178 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="28fe7aea-16bf-46f3-b8b0-6b322c4ffecf" containerName="keystone-db-sync" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.132370 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="491293d3-586b-4691-91fd-e1c6e51144bb" containerName="mariadb-account-create" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.132401 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="09785215-874e-4d3f-b431-4d4df98e31c8" containerName="mariadb-account-create" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.132418 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="28fe7aea-16bf-46f3-b8b0-6b322c4ffecf" 
containerName="keystone-db-sync" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.132428 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="41c878ea-21d2-4f4d-9628-d7bb8a46c1e0" containerName="mariadb-account-create" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.132957 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.139655 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.139839 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.140042 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.140132 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-68s85" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.178593 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77bdbd65c-sx2lb"] Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.180092 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.210176 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-hrh8g"] Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.227475 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77bdbd65c-sx2lb"] Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.296190 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-config\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.296258 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-credential-keys\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.296283 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-config-data\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.296333 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-scripts\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.296388 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9rj4\" (UniqueName: 
\"kubernetes.io/projected/1ee46dbc-022e-463a-bb44-7ce4adc2b528-kube-api-access-d9rj4\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.296430 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8q2q\" (UniqueName: \"kubernetes.io/projected/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-kube-api-access-q8q2q\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.296451 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-fernet-keys\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.296488 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-ovsdbserver-nb\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.296516 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-dns-swift-storage-0\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.296536 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-combined-ca-bundle\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.296593 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-dns-svc\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.296630 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-ovsdbserver-sb\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.401933 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-dns-svc\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.402005 4684 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-ovsdbserver-sb\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.402034 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-config\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.402070 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-credential-keys\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.402089 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-config-data\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.402136 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-scripts\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.402190 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9rj4\" (UniqueName: \"kubernetes.io/projected/1ee46dbc-022e-463a-bb44-7ce4adc2b528-kube-api-access-d9rj4\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.402232 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8q2q\" (UniqueName: \"kubernetes.io/projected/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-kube-api-access-q8q2q\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.402256 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-fernet-keys\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.402290 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-ovsdbserver-nb\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.402327 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-dns-swift-storage-0\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.402350 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-combined-ca-bundle\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.406912 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.408034 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-combined-ca-bundle\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.414509 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.414694 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-scripts\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.415504 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-dns-svc\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.416214 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-ovsdbserver-sb\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.416722 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-config\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.420198 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-credential-keys\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.420247 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-ovsdbserver-nb\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 
13:22:54.420503 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-fernet-keys\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.420595 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.420846 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.421125 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-dns-swift-storage-0\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.433524 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-config-data\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.437650 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9rj4\" (UniqueName: \"kubernetes.io/projected/1ee46dbc-022e-463a-bb44-7ce4adc2b528-kube-api-access-d9rj4\") pod \"dnsmasq-dns-77bdbd65c-sx2lb\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.445081 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8q2q\" (UniqueName: \"kubernetes.io/projected/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-kube-api-access-q8q2q\") pod \"keystone-bootstrap-hrh8g\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.474961 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.482299 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.485492 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77bdbd65c-sx2lb"] Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.486207 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.503533 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.503580 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-scripts\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.503638 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9p2rc\" (UniqueName: \"kubernetes.io/projected/91117a69-e1f3-4e2d-9973-8c6f758962c2-kube-api-access-9p2rc\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.503682 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-config-data\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.503717 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.503827 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91117a69-e1f3-4e2d-9973-8c6f758962c2-run-httpd\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.503914 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91117a69-e1f3-4e2d-9973-8c6f758962c2-log-httpd\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.504982 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-676c67789-psl8v"] Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.506515 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.537186 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-676c67789-psl8v"] Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.570657 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-hjlvf"] Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.571678 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.575116 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-d66tw" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.575433 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.581343 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.606849 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.606915 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-scripts\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.606953 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-config\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.607040 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9p2rc\" (UniqueName: \"kubernetes.io/projected/91117a69-e1f3-4e2d-9973-8c6f758962c2-kube-api-access-9p2rc\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.607080 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-config-data\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.607111 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-ovsdbserver-nb\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.607142 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.607188 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-dns-swift-storage-0\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " 
pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.607233 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-ovsdbserver-sb\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.607315 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91117a69-e1f3-4e2d-9973-8c6f758962c2-run-httpd\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.607354 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-dns-svc\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.607386 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hknh\" (UniqueName: \"kubernetes.io/projected/8134a6ec-040a-4f46-9372-08a4bac2ea94-kube-api-access-4hknh\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.607421 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91117a69-e1f3-4e2d-9973-8c6f758962c2-log-httpd\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.607887 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91117a69-e1f3-4e2d-9973-8c6f758962c2-log-httpd\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.608585 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91117a69-e1f3-4e2d-9973-8c6f758962c2-run-httpd\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.610149 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.611256 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-scripts\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.611843 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-hjlvf"] Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.612159 4684 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.619982 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-config-data\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.648716 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9p2rc\" (UniqueName: \"kubernetes.io/projected/91117a69-e1f3-4e2d-9973-8c6f758962c2-kube-api-access-9p2rc\") pod \"ceilometer-0\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") " pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.707207 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.711207 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-dns-svc\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.711367 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hknh\" (UniqueName: \"kubernetes.io/projected/8134a6ec-040a-4f46-9372-08a4bac2ea94-kube-api-access-4hknh\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.711508 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-logs\") pod \"placement-db-sync-hjlvf\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") " pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.711629 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-scripts\") pod \"placement-db-sync-hjlvf\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") " pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.711752 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-config\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.711886 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-ovsdbserver-nb\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.712034 4684 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-dns-swift-storage-0\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.712150 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-ovsdbserver-sb\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.712261 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cs4z8\" (UniqueName: \"kubernetes.io/projected/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-kube-api-access-cs4z8\") pod \"placement-db-sync-hjlvf\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") " pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.712349 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-config-data\") pod \"placement-db-sync-hjlvf\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") " pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.712426 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-combined-ca-bundle\") pod \"placement-db-sync-hjlvf\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") " pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.712422 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-dns-svc\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.714656 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-ovsdbserver-sb\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.715495 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-dns-swift-storage-0\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.718914 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-config\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.718978 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-ovsdbserver-nb\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.742301 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hknh\" (UniqueName: \"kubernetes.io/projected/8134a6ec-040a-4f46-9372-08a4bac2ea94-kube-api-access-4hknh\") pod \"dnsmasq-dns-676c67789-psl8v\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") " pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.785334 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-5qfl5"] Oct 13 13:22:54 crc kubenswrapper[4684]: E1013 13:22:54.786415 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35d92a7c-488a-4989-b408-9353d77f0416" containerName="dnsmasq-dns" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.786450 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="35d92a7c-488a-4989-b408-9353d77f0416" containerName="dnsmasq-dns" Oct 13 13:22:54 crc kubenswrapper[4684]: E1013 13:22:54.786488 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35d92a7c-488a-4989-b408-9353d77f0416" containerName="init" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.786500 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="35d92a7c-488a-4989-b408-9353d77f0416" containerName="init" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.786704 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="35d92a7c-488a-4989-b408-9353d77f0416" containerName="dnsmasq-dns" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.787392 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-5qfl5" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.792300 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-tnbh2" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.792466 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.799563 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-5qfl5"] Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.814418 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7ngr\" (UniqueName: \"kubernetes.io/projected/35d92a7c-488a-4989-b408-9353d77f0416-kube-api-access-n7ngr\") pod \"35d92a7c-488a-4989-b408-9353d77f0416\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.814494 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-ovsdbserver-sb\") pod \"35d92a7c-488a-4989-b408-9353d77f0416\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.814525 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-config\") pod \"35d92a7c-488a-4989-b408-9353d77f0416\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.814547 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-ovsdbserver-nb\") pod \"35d92a7c-488a-4989-b408-9353d77f0416\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.814643 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-dns-svc\") pod \"35d92a7c-488a-4989-b408-9353d77f0416\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.814677 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-dns-swift-storage-0\") pod \"35d92a7c-488a-4989-b408-9353d77f0416\" (UID: \"35d92a7c-488a-4989-b408-9353d77f0416\") " Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.815030 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cs4z8\" (UniqueName: \"kubernetes.io/projected/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-kube-api-access-cs4z8\") pod \"placement-db-sync-hjlvf\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") " pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.815062 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-config-data\") pod \"placement-db-sync-hjlvf\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") " pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.815084 4684 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-combined-ca-bundle\") pod \"placement-db-sync-hjlvf\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") " pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.815148 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-logs\") pod \"placement-db-sync-hjlvf\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") " pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.815207 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-scripts\") pod \"placement-db-sync-hjlvf\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") " pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.823441 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35d92a7c-488a-4989-b408-9353d77f0416-kube-api-access-n7ngr" (OuterVolumeSpecName: "kube-api-access-n7ngr") pod "35d92a7c-488a-4989-b408-9353d77f0416" (UID: "35d92a7c-488a-4989-b408-9353d77f0416"). InnerVolumeSpecName "kube-api-access-n7ngr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.825653 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-logs\") pod \"placement-db-sync-hjlvf\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") " pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.828262 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-scripts\") pod \"placement-db-sync-hjlvf\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") " pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.832868 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-combined-ca-bundle\") pod \"placement-db-sync-hjlvf\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") " pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.840032 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-qprxl"] Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.845645 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.848510 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-zc47m" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.851811 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-config-data\") pod \"placement-db-sync-hjlvf\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") " pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.852745 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.853008 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.865761 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cs4z8\" (UniqueName: \"kubernetes.io/projected/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-kube-api-access-cs4z8\") pod \"placement-db-sync-hjlvf\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") " pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.870480 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-qprxl"] Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.897885 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "35d92a7c-488a-4989-b408-9353d77f0416" (UID: "35d92a7c-488a-4989-b408-9353d77f0416"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.905339 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "35d92a7c-488a-4989-b408-9353d77f0416" (UID: "35d92a7c-488a-4989-b408-9353d77f0416"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.906466 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "35d92a7c-488a-4989-b408-9353d77f0416" (UID: "35d92a7c-488a-4989-b408-9353d77f0416"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.908641 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "35d92a7c-488a-4989-b408-9353d77f0416" (UID: "35d92a7c-488a-4989-b408-9353d77f0416"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.917165 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8f47ea9a-7261-4645-bcd9-7abf500d9501-db-sync-config-data\") pod \"barbican-db-sync-5qfl5\" (UID: \"8f47ea9a-7261-4645-bcd9-7abf500d9501\") " pod="openstack/barbican-db-sync-5qfl5" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.917206 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcblg\" (UniqueName: \"kubernetes.io/projected/5f3242f8-a0ba-4799-bd79-a0523603fb37-kube-api-access-dcblg\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.917271 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-config-data\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.917295 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ds5x\" (UniqueName: \"kubernetes.io/projected/8f47ea9a-7261-4645-bcd9-7abf500d9501-kube-api-access-2ds5x\") pod \"barbican-db-sync-5qfl5\" (UID: \"8f47ea9a-7261-4645-bcd9-7abf500d9501\") " pod="openstack/barbican-db-sync-5qfl5" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.917318 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-scripts\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.917337 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f47ea9a-7261-4645-bcd9-7abf500d9501-combined-ca-bundle\") pod \"barbican-db-sync-5qfl5\" (UID: \"8f47ea9a-7261-4645-bcd9-7abf500d9501\") " pod="openstack/barbican-db-sync-5qfl5" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.917363 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-combined-ca-bundle\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.917387 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5f3242f8-a0ba-4799-bd79-a0523603fb37-etc-machine-id\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.917404 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-db-sync-config-data\") pod \"cinder-db-sync-qprxl\" (UID: 
\"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.917466 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7ngr\" (UniqueName: \"kubernetes.io/projected/35d92a7c-488a-4989-b408-9353d77f0416-kube-api-access-n7ngr\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.917477 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.917485 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.917494 4684 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.917504 4684 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.929066 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.931308 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-config" (OuterVolumeSpecName: "config") pod "35d92a7c-488a-4989-b408-9353d77f0416" (UID: "35d92a7c-488a-4989-b408-9353d77f0416"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.948149 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-hjlvf" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.952012 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.966831 4684 generic.go:334] "Generic (PLEG): container finished" podID="35d92a7c-488a-4989-b408-9353d77f0416" containerID="e642881a3a8ca1a3c5f4ceb5bd7ef3fe8d5340ddaf44c53ecd5b97a6c4e5ee98" exitCode=0 Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.967046 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.978504 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" event={"ID":"35d92a7c-488a-4989-b408-9353d77f0416","Type":"ContainerDied","Data":"e642881a3a8ca1a3c5f4ceb5bd7ef3fe8d5340ddaf44c53ecd5b97a6c4e5ee98"} Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.978551 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b9cc7477c-cd9wz" event={"ID":"35d92a7c-488a-4989-b408-9353d77f0416","Type":"ContainerDied","Data":"ad0e35befe355b7ea74bd8deae3d86cec87782a996d451793f36bb4ccb0f6bf2"} Oct 13 13:22:54 crc kubenswrapper[4684]: I1013 13:22:54.978583 4684 scope.go:117] "RemoveContainer" containerID="e642881a3a8ca1a3c5f4ceb5bd7ef3fe8d5340ddaf44c53ecd5b97a6c4e5ee98" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.015390 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b9cc7477c-cd9wz"] Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.019136 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8f47ea9a-7261-4645-bcd9-7abf500d9501-db-sync-config-data\") pod \"barbican-db-sync-5qfl5\" (UID: \"8f47ea9a-7261-4645-bcd9-7abf500d9501\") " pod="openstack/barbican-db-sync-5qfl5" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.019183 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcblg\" (UniqueName: \"kubernetes.io/projected/5f3242f8-a0ba-4799-bd79-a0523603fb37-kube-api-access-dcblg\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.019265 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-config-data\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.019288 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ds5x\" (UniqueName: \"kubernetes.io/projected/8f47ea9a-7261-4645-bcd9-7abf500d9501-kube-api-access-2ds5x\") pod \"barbican-db-sync-5qfl5\" (UID: \"8f47ea9a-7261-4645-bcd9-7abf500d9501\") " pod="openstack/barbican-db-sync-5qfl5" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.019320 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-scripts\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.019339 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f47ea9a-7261-4645-bcd9-7abf500d9501-combined-ca-bundle\") pod \"barbican-db-sync-5qfl5\" (UID: \"8f47ea9a-7261-4645-bcd9-7abf500d9501\") " pod="openstack/barbican-db-sync-5qfl5" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.019366 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-combined-ca-bundle\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.019397 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5f3242f8-a0ba-4799-bd79-a0523603fb37-etc-machine-id\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.019421 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-db-sync-config-data\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.019487 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35d92a7c-488a-4989-b408-9353d77f0416-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.020578 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5f3242f8-a0ba-4799-bd79-a0523603fb37-etc-machine-id\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.025236 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-combined-ca-bundle\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.028762 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-config-data\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.028776 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f47ea9a-7261-4645-bcd9-7abf500d9501-combined-ca-bundle\") pod \"barbican-db-sync-5qfl5\" (UID: \"8f47ea9a-7261-4645-bcd9-7abf500d9501\") " pod="openstack/barbican-db-sync-5qfl5" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.029135 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-db-sync-config-data\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.031147 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-scripts\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.031268 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" 
(UniqueName: \"kubernetes.io/secret/8f47ea9a-7261-4645-bcd9-7abf500d9501-db-sync-config-data\") pod \"barbican-db-sync-5qfl5\" (UID: \"8f47ea9a-7261-4645-bcd9-7abf500d9501\") " pod="openstack/barbican-db-sync-5qfl5" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.033581 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b9cc7477c-cd9wz"] Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.037164 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ds5x\" (UniqueName: \"kubernetes.io/projected/8f47ea9a-7261-4645-bcd9-7abf500d9501-kube-api-access-2ds5x\") pod \"barbican-db-sync-5qfl5\" (UID: \"8f47ea9a-7261-4645-bcd9-7abf500d9501\") " pod="openstack/barbican-db-sync-5qfl5" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.055465 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcblg\" (UniqueName: \"kubernetes.io/projected/5f3242f8-a0ba-4799-bd79-a0523603fb37-kube-api-access-dcblg\") pod \"cinder-db-sync-qprxl\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") " pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.062404 4684 scope.go:117] "RemoveContainer" containerID="e8b7de5b7869e1006004976315a31d2abd0e468b378c8c09baa6a1cfdc44a2ff" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.088837 4684 scope.go:117] "RemoveContainer" containerID="e642881a3a8ca1a3c5f4ceb5bd7ef3fe8d5340ddaf44c53ecd5b97a6c4e5ee98" Oct 13 13:22:55 crc kubenswrapper[4684]: E1013 13:22:55.089406 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e642881a3a8ca1a3c5f4ceb5bd7ef3fe8d5340ddaf44c53ecd5b97a6c4e5ee98\": container with ID starting with e642881a3a8ca1a3c5f4ceb5bd7ef3fe8d5340ddaf44c53ecd5b97a6c4e5ee98 not found: ID does not exist" containerID="e642881a3a8ca1a3c5f4ceb5bd7ef3fe8d5340ddaf44c53ecd5b97a6c4e5ee98" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.089440 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e642881a3a8ca1a3c5f4ceb5bd7ef3fe8d5340ddaf44c53ecd5b97a6c4e5ee98"} err="failed to get container status \"e642881a3a8ca1a3c5f4ceb5bd7ef3fe8d5340ddaf44c53ecd5b97a6c4e5ee98\": rpc error: code = NotFound desc = could not find container \"e642881a3a8ca1a3c5f4ceb5bd7ef3fe8d5340ddaf44c53ecd5b97a6c4e5ee98\": container with ID starting with e642881a3a8ca1a3c5f4ceb5bd7ef3fe8d5340ddaf44c53ecd5b97a6c4e5ee98 not found: ID does not exist" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.089466 4684 scope.go:117] "RemoveContainer" containerID="e8b7de5b7869e1006004976315a31d2abd0e468b378c8c09baa6a1cfdc44a2ff" Oct 13 13:22:55 crc kubenswrapper[4684]: E1013 13:22:55.090620 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8b7de5b7869e1006004976315a31d2abd0e468b378c8c09baa6a1cfdc44a2ff\": container with ID starting with e8b7de5b7869e1006004976315a31d2abd0e468b378c8c09baa6a1cfdc44a2ff not found: ID does not exist" containerID="e8b7de5b7869e1006004976315a31d2abd0e468b378c8c09baa6a1cfdc44a2ff" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.090645 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8b7de5b7869e1006004976315a31d2abd0e468b378c8c09baa6a1cfdc44a2ff"} err="failed to get container status \"e8b7de5b7869e1006004976315a31d2abd0e468b378c8c09baa6a1cfdc44a2ff\": rpc error: code 
= NotFound desc = could not find container \"e8b7de5b7869e1006004976315a31d2abd0e468b378c8c09baa6a1cfdc44a2ff\": container with ID starting with e8b7de5b7869e1006004976315a31d2abd0e468b378c8c09baa6a1cfdc44a2ff not found: ID does not exist" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.117880 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-6b5hs"] Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.119153 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-6b5hs" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.120438 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-5qfl5" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.132221 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-m666p" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.138104 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.143478 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.148199 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-hrh8g"] Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.171947 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-6b5hs"] Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.172543 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-qprxl" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.217535 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77bdbd65c-sx2lb"] Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.240664 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/89604b0f-1f4f-440b-b779-a3d5fe0c4895-config\") pod \"neutron-db-sync-6b5hs\" (UID: \"89604b0f-1f4f-440b-b779-a3d5fe0c4895\") " pod="openstack/neutron-db-sync-6b5hs" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.240784 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpkns\" (UniqueName: \"kubernetes.io/projected/89604b0f-1f4f-440b-b779-a3d5fe0c4895-kube-api-access-gpkns\") pod \"neutron-db-sync-6b5hs\" (UID: \"89604b0f-1f4f-440b-b779-a3d5fe0c4895\") " pod="openstack/neutron-db-sync-6b5hs" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.241065 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89604b0f-1f4f-440b-b779-a3d5fe0c4895-combined-ca-bundle\") pod \"neutron-db-sync-6b5hs\" (UID: \"89604b0f-1f4f-440b-b779-a3d5fe0c4895\") " pod="openstack/neutron-db-sync-6b5hs" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.342731 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89604b0f-1f4f-440b-b779-a3d5fe0c4895-combined-ca-bundle\") pod \"neutron-db-sync-6b5hs\" (UID: \"89604b0f-1f4f-440b-b779-a3d5fe0c4895\") " pod="openstack/neutron-db-sync-6b5hs" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.342852 4684 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/89604b0f-1f4f-440b-b779-a3d5fe0c4895-config\") pod \"neutron-db-sync-6b5hs\" (UID: \"89604b0f-1f4f-440b-b779-a3d5fe0c4895\") " pod="openstack/neutron-db-sync-6b5hs" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.342934 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpkns\" (UniqueName: \"kubernetes.io/projected/89604b0f-1f4f-440b-b779-a3d5fe0c4895-kube-api-access-gpkns\") pod \"neutron-db-sync-6b5hs\" (UID: \"89604b0f-1f4f-440b-b779-a3d5fe0c4895\") " pod="openstack/neutron-db-sync-6b5hs" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.349836 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89604b0f-1f4f-440b-b779-a3d5fe0c4895-combined-ca-bundle\") pod \"neutron-db-sync-6b5hs\" (UID: \"89604b0f-1f4f-440b-b779-a3d5fe0c4895\") " pod="openstack/neutron-db-sync-6b5hs" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.350731 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/89604b0f-1f4f-440b-b779-a3d5fe0c4895-config\") pod \"neutron-db-sync-6b5hs\" (UID: \"89604b0f-1f4f-440b-b779-a3d5fe0c4895\") " pod="openstack/neutron-db-sync-6b5hs" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.364519 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpkns\" (UniqueName: \"kubernetes.io/projected/89604b0f-1f4f-440b-b779-a3d5fe0c4895-kube-api-access-gpkns\") pod \"neutron-db-sync-6b5hs\" (UID: \"89604b0f-1f4f-440b-b779-a3d5fe0c4895\") " pod="openstack/neutron-db-sync-6b5hs" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.459252 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.528721 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-6b5hs" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.622585 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-hh8w7" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.690384 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-hjlvf"] Oct 13 13:22:55 crc kubenswrapper[4684]: W1013 13:22:55.712242 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6fd2f1bd_d9a0_4e63_8cba_a5a0225a13d4.slice/crio-919e430d58a29b15747dffe243f8ca37eff3217398f422e41e895d242cff742f WatchSource:0}: Error finding container 919e430d58a29b15747dffe243f8ca37eff3217398f422e41e895d242cff742f: Status 404 returned error can't find the container with id 919e430d58a29b15747dffe243f8ca37eff3217398f422e41e895d242cff742f Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.754069 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-config-data\") pod \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\" (UID: \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\") " Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.754215 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94l7v\" (UniqueName: \"kubernetes.io/projected/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-kube-api-access-94l7v\") pod \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\" (UID: \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\") " Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.754330 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-db-sync-config-data\") pod \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\" (UID: \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\") " Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.754408 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-combined-ca-bundle\") pod \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\" (UID: \"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe\") " Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.761509 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe" (UID: "f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.766060 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-kube-api-access-94l7v" (OuterVolumeSpecName: "kube-api-access-94l7v") pod "f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe" (UID: "f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe"). InnerVolumeSpecName "kube-api-access-94l7v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.769110 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-676c67789-psl8v"] Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.788195 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe" (UID: "f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.805366 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-config-data" (OuterVolumeSpecName: "config-data") pod "f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe" (UID: "f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.856071 4684 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.856108 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.856121 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.856133 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94l7v\" (UniqueName: \"kubernetes.io/projected/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe-kube-api-access-94l7v\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.923467 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-5qfl5"] Oct 13 13:22:55 crc kubenswrapper[4684]: I1013 13:22:55.952798 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-qprxl"] Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.023928 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-676c67789-psl8v" event={"ID":"8134a6ec-040a-4f46-9372-08a4bac2ea94","Type":"ContainerStarted","Data":"21347afe375ba28e84b869a1b8f37fb5aedd1a9fa24a109f1fb4e07e20df3143"} Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.042822 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91117a69-e1f3-4e2d-9973-8c6f758962c2","Type":"ContainerStarted","Data":"1f9a249b5d2f35e5e8f80939c4930ab2727810d363f56c853f87785f23671a0c"} Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.054519 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-hjlvf" event={"ID":"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4","Type":"ContainerStarted","Data":"919e430d58a29b15747dffe243f8ca37eff3217398f422e41e895d242cff742f"} Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.076709 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-bootstrap-hrh8g" event={"ID":"50465694-fbcb-4c34-8c81-2fcc4ebd43fd","Type":"ContainerStarted","Data":"35b47892ffc3d492ff650a93f630bb8ea010af74ccef7ed53f3a1aef8dcaf08f"} Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.076754 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hrh8g" event={"ID":"50465694-fbcb-4c34-8c81-2fcc4ebd43fd","Type":"ContainerStarted","Data":"c7f2b905085349e9e149cd5381070329c52b6123716b6af3469665cac0ed2806"} Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.082613 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-hh8w7" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.082633 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hh8w7" event={"ID":"f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe","Type":"ContainerDied","Data":"14cc67a47df25a905328dc7f245921953677752f77591b1445a31530c4f6ee14"} Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.082672 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14cc67a47df25a905328dc7f245921953677752f77591b1445a31530c4f6ee14" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.086109 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5qfl5" event={"ID":"8f47ea9a-7261-4645-bcd9-7abf500d9501","Type":"ContainerStarted","Data":"5be6ec84215d1c46a6cb61120462ce5c2e254b99bec751cb295fe0324f233154"} Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.092780 4684 generic.go:334] "Generic (PLEG): container finished" podID="1ee46dbc-022e-463a-bb44-7ce4adc2b528" containerID="b83d179fe4e831d3dfe01f101b5c3f9dcc1e54e8c8c0c6fc4059cd8bbd26a6a5" exitCode=0 Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.092821 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" event={"ID":"1ee46dbc-022e-463a-bb44-7ce4adc2b528","Type":"ContainerDied","Data":"b83d179fe4e831d3dfe01f101b5c3f9dcc1e54e8c8c0c6fc4059cd8bbd26a6a5"} Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.092845 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" event={"ID":"1ee46dbc-022e-463a-bb44-7ce4adc2b528","Type":"ContainerStarted","Data":"ed081dea2b247df15fc86b0cb844dbbe60759adc135e18c30bd501d65bf8837c"} Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.098735 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-6b5hs"] Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.102415 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-hrh8g" podStartSLOduration=2.102401388 podStartE2EDuration="2.102401388s" podCreationTimestamp="2025-10-13 13:22:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:22:56.100233289 +0000 UTC m=+930.667617359" watchObservedRunningTime="2025-10-13 13:22:56.102401388 +0000 UTC m=+930.669785458" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.188662 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.380226 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35d92a7c-488a-4989-b408-9353d77f0416" path="/var/lib/kubelet/pods/35d92a7c-488a-4989-b408-9353d77f0416/volumes" Oct 13 13:22:56 crc 
kubenswrapper[4684]: I1013 13:22:56.381624 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-676c67789-psl8v"] Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.411588 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bb74c5949-vwrg8"] Oct 13 13:22:56 crc kubenswrapper[4684]: E1013 13:22:56.412074 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe" containerName="glance-db-sync" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.412090 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe" containerName="glance-db-sync" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.412321 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe" containerName="glance-db-sync" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.413481 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.444774 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bb74c5949-vwrg8"] Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.581735 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-config\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.581821 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-dns-svc\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.581842 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzjll\" (UniqueName: \"kubernetes.io/projected/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-kube-api-access-hzjll\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.581889 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-dns-swift-storage-0\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.581974 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-ovsdbserver-nb\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.581994 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-ovsdbserver-sb\") pod 
\"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.701130 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-config\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.701228 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-dns-svc\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.701256 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzjll\" (UniqueName: \"kubernetes.io/projected/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-kube-api-access-hzjll\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.701296 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-dns-swift-storage-0\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.701366 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-ovsdbserver-nb\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.701386 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-ovsdbserver-sb\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.702295 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-ovsdbserver-sb\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.702339 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-config\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.702841 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-dns-svc\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 
13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.703115 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-dns-swift-storage-0\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.704409 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-ovsdbserver-nb\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.722302 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzjll\" (UniqueName: \"kubernetes.io/projected/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-kube-api-access-hzjll\") pod \"dnsmasq-dns-bb74c5949-vwrg8\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") " pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.745665 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.775088 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.903240 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-ovsdbserver-sb\") pod \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.903324 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-ovsdbserver-nb\") pod \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.903858 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-dns-svc\") pod \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.903883 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9rj4\" (UniqueName: \"kubernetes.io/projected/1ee46dbc-022e-463a-bb44-7ce4adc2b528-kube-api-access-d9rj4\") pod \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.903933 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-dns-swift-storage-0\") pod \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.903972 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-config\") 
pod \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\" (UID: \"1ee46dbc-022e-463a-bb44-7ce4adc2b528\") " Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.909772 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ee46dbc-022e-463a-bb44-7ce4adc2b528-kube-api-access-d9rj4" (OuterVolumeSpecName: "kube-api-access-d9rj4") pod "1ee46dbc-022e-463a-bb44-7ce4adc2b528" (UID: "1ee46dbc-022e-463a-bb44-7ce4adc2b528"). InnerVolumeSpecName "kube-api-access-d9rj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.932831 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1ee46dbc-022e-463a-bb44-7ce4adc2b528" (UID: "1ee46dbc-022e-463a-bb44-7ce4adc2b528"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.937542 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1ee46dbc-022e-463a-bb44-7ce4adc2b528" (UID: "1ee46dbc-022e-463a-bb44-7ce4adc2b528"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.938428 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1ee46dbc-022e-463a-bb44-7ce4adc2b528" (UID: "1ee46dbc-022e-463a-bb44-7ce4adc2b528"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.944238 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1ee46dbc-022e-463a-bb44-7ce4adc2b528" (UID: "1ee46dbc-022e-463a-bb44-7ce4adc2b528"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:56 crc kubenswrapper[4684]: I1013 13:22:56.956434 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-config" (OuterVolumeSpecName: "config") pod "1ee46dbc-022e-463a-bb44-7ce4adc2b528" (UID: "1ee46dbc-022e-463a-bb44-7ce4adc2b528"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.005414 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.005469 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.005481 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.005490 4684 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.005499 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9rj4\" (UniqueName: \"kubernetes.io/projected/1ee46dbc-022e-463a-bb44-7ce4adc2b528-kube-api-access-d9rj4\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.005508 4684 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1ee46dbc-022e-463a-bb44-7ce4adc2b528-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.103345 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qprxl" event={"ID":"5f3242f8-a0ba-4799-bd79-a0523603fb37","Type":"ContainerStarted","Data":"fd75ee02e043dfff7bbb1302a2565caec4e28859a4cf475ef17f2f7e642d5d3f"} Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.105679 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6b5hs" event={"ID":"89604b0f-1f4f-440b-b779-a3d5fe0c4895","Type":"ContainerStarted","Data":"3ea011a28c8ae539d6f2995a5c117d74dcb59518f0e6b77df5f2f8b8696b1478"} Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.105742 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6b5hs" event={"ID":"89604b0f-1f4f-440b-b779-a3d5fe0c4895","Type":"ContainerStarted","Data":"eb05a645c993a2996dc9f5418d90b45fc97c4920cf94c5eefdff69e57d07fc18"} Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.111666 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" event={"ID":"1ee46dbc-022e-463a-bb44-7ce4adc2b528","Type":"ContainerDied","Data":"ed081dea2b247df15fc86b0cb844dbbe60759adc135e18c30bd501d65bf8837c"} Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.111680 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77bdbd65c-sx2lb" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.111735 4684 scope.go:117] "RemoveContainer" containerID="b83d179fe4e831d3dfe01f101b5c3f9dcc1e54e8c8c0c6fc4059cd8bbd26a6a5" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.116785 4684 generic.go:334] "Generic (PLEG): container finished" podID="8134a6ec-040a-4f46-9372-08a4bac2ea94" containerID="5a53b00f808d39f7bcc24c104a927c48f25c33a719fe33805d4d3577a84f382b" exitCode=0 Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.116838 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-676c67789-psl8v" event={"ID":"8134a6ec-040a-4f46-9372-08a4bac2ea94","Type":"ContainerDied","Data":"5a53b00f808d39f7bcc24c104a927c48f25c33a719fe33805d4d3577a84f382b"} Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.122608 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-6b5hs" podStartSLOduration=2.122584601 podStartE2EDuration="2.122584601s" podCreationTimestamp="2025-10-13 13:22:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:22:57.119873725 +0000 UTC m=+931.687257795" watchObservedRunningTime="2025-10-13 13:22:57.122584601 +0000 UTC m=+931.689968681" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.204983 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77bdbd65c-sx2lb"] Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.216971 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77bdbd65c-sx2lb"] Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.237213 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 13:22:57 crc kubenswrapper[4684]: E1013 13:22:57.237570 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ee46dbc-022e-463a-bb44-7ce4adc2b528" containerName="init" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.237582 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ee46dbc-022e-463a-bb44-7ce4adc2b528" containerName="init" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.237784 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ee46dbc-022e-463a-bb44-7ce4adc2b528" containerName="init" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.238851 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.240353 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-8vppj" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.240958 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.242582 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.256163 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bb74c5949-vwrg8"] Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.264838 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.412210 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.412422 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.415044 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-scripts\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.415349 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-logs\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.415399 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbjr4\" (UniqueName: \"kubernetes.io/projected/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-kube-api-access-hbjr4\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.415439 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-config-data\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.415471 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.515027 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.516538 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.517302 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbjr4\" (UniqueName: \"kubernetes.io/projected/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-kube-api-access-hbjr4\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.517352 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-config-data\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.517384 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.517415 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.517465 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.517488 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-scripts\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.517549 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-logs\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.517972 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-logs\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " 
pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.518584 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.519282 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.520771 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.523656 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-scripts\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.525691 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.528365 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.529151 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-config-data\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.543255 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbjr4\" (UniqueName: \"kubernetes.io/projected/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-kube-api-access-hbjr4\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.559070 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") " pod="openstack/glance-default-external-api-0" Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.590668 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-676c67789-psl8v"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.623617 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.623702 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xvsk\" (UniqueName: \"kubernetes.io/projected/701957c2-011b-4d9a-84f2-eac3c13116a9-kube-api-access-2xvsk\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.623775 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/701957c2-011b-4d9a-84f2-eac3c13116a9-logs\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.624049 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.624080 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/701957c2-011b-4d9a-84f2-eac3c13116a9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.624124 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.624194 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.707307 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.727939 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-config\") pod \"8134a6ec-040a-4f46-9372-08a4bac2ea94\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") "
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.728026 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-ovsdbserver-nb\") pod \"8134a6ec-040a-4f46-9372-08a4bac2ea94\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") "
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.728087 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-dns-swift-storage-0\") pod \"8134a6ec-040a-4f46-9372-08a4bac2ea94\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") "
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.728209 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-ovsdbserver-sb\") pod \"8134a6ec-040a-4f46-9372-08a4bac2ea94\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") "
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.728282 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hknh\" (UniqueName: \"kubernetes.io/projected/8134a6ec-040a-4f46-9372-08a4bac2ea94-kube-api-access-4hknh\") pod \"8134a6ec-040a-4f46-9372-08a4bac2ea94\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") "
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.728352 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-dns-svc\") pod \"8134a6ec-040a-4f46-9372-08a4bac2ea94\" (UID: \"8134a6ec-040a-4f46-9372-08a4bac2ea94\") "
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.728670 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.728757 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.728805 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xvsk\" (UniqueName: \"kubernetes.io/projected/701957c2-011b-4d9a-84f2-eac3c13116a9-kube-api-access-2xvsk\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.728848 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/701957c2-011b-4d9a-84f2-eac3c13116a9-logs\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.729015 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.729047 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/701957c2-011b-4d9a-84f2-eac3c13116a9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.729088 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.729234 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.729450 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/701957c2-011b-4d9a-84f2-eac3c13116a9-logs\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.729556 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/701957c2-011b-4d9a-84f2-eac3c13116a9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.737396 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.738430 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8134a6ec-040a-4f46-9372-08a4bac2ea94-kube-api-access-4hknh" (OuterVolumeSpecName: "kube-api-access-4hknh") pod "8134a6ec-040a-4f46-9372-08a4bac2ea94" (UID: "8134a6ec-040a-4f46-9372-08a4bac2ea94"). InnerVolumeSpecName "kube-api-access-4hknh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.742293 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.744131 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.749187 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xvsk\" (UniqueName: \"kubernetes.io/projected/701957c2-011b-4d9a-84f2-eac3c13116a9-kube-api-access-2xvsk\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.770075 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-config" (OuterVolumeSpecName: "config") pod "8134a6ec-040a-4f46-9372-08a4bac2ea94" (UID: "8134a6ec-040a-4f46-9372-08a4bac2ea94"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.773198 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8134a6ec-040a-4f46-9372-08a4bac2ea94" (UID: "8134a6ec-040a-4f46-9372-08a4bac2ea94"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.777745 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8134a6ec-040a-4f46-9372-08a4bac2ea94" (UID: "8134a6ec-040a-4f46-9372-08a4bac2ea94"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.777890 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8134a6ec-040a-4f46-9372-08a4bac2ea94" (UID: "8134a6ec-040a-4f46-9372-08a4bac2ea94"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.800990 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8134a6ec-040a-4f46-9372-08a4bac2ea94" (UID: "8134a6ec-040a-4f46-9372-08a4bac2ea94"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.801498 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.831788 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.831828 4684 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.831838 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.831848 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hknh\" (UniqueName: \"kubernetes.io/projected/8134a6ec-040a-4f46-9372-08a4bac2ea94-kube-api-access-4hknh\") on node \"crc\" DevicePath \"\""
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.831860 4684 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.831868 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8134a6ec-040a-4f46-9372-08a4bac2ea94-config\") on node \"crc\" DevicePath \"\""
Oct 13 13:22:57 crc kubenswrapper[4684]: I1013 13:22:57.903104 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 13 13:22:58 crc kubenswrapper[4684]: I1013 13:22:58.144365 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-676c67789-psl8v"
Need to start a new one" pod="openstack/dnsmasq-dns-676c67789-psl8v" Oct 13 13:22:58 crc kubenswrapper[4684]: I1013 13:22:58.144437 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-676c67789-psl8v" event={"ID":"8134a6ec-040a-4f46-9372-08a4bac2ea94","Type":"ContainerDied","Data":"21347afe375ba28e84b869a1b8f37fb5aedd1a9fa24a109f1fb4e07e20df3143"} Oct 13 13:22:58 crc kubenswrapper[4684]: I1013 13:22:58.144518 4684 scope.go:117] "RemoveContainer" containerID="5a53b00f808d39f7bcc24c104a927c48f25c33a719fe33805d4d3577a84f382b" Oct 13 13:22:58 crc kubenswrapper[4684]: I1013 13:22:58.167279 4684 generic.go:334] "Generic (PLEG): container finished" podID="87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b" containerID="dee2a9a4f94d3c8bbb8c86de65cd7fb0441ce8e7a1aeaec809ed60f3d0a167ee" exitCode=0 Oct 13 13:22:58 crc kubenswrapper[4684]: I1013 13:22:58.168343 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" event={"ID":"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b","Type":"ContainerDied","Data":"dee2a9a4f94d3c8bbb8c86de65cd7fb0441ce8e7a1aeaec809ed60f3d0a167ee"} Oct 13 13:22:58 crc kubenswrapper[4684]: I1013 13:22:58.168378 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" event={"ID":"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b","Type":"ContainerStarted","Data":"bf6f9113d3a4421208dcb5edb5053215361dcb4caa3808d57b25c66826c8f12b"} Oct 13 13:22:58 crc kubenswrapper[4684]: I1013 13:22:58.241416 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-676c67789-psl8v"] Oct 13 13:22:58 crc kubenswrapper[4684]: I1013 13:22:58.263361 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-676c67789-psl8v"] Oct 13 13:22:58 crc kubenswrapper[4684]: I1013 13:22:58.372614 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ee46dbc-022e-463a-bb44-7ce4adc2b528" path="/var/lib/kubelet/pods/1ee46dbc-022e-463a-bb44-7ce4adc2b528/volumes" Oct 13 13:22:58 crc kubenswrapper[4684]: I1013 13:22:58.373811 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8134a6ec-040a-4f46-9372-08a4bac2ea94" path="/var/lib/kubelet/pods/8134a6ec-040a-4f46-9372-08a4bac2ea94/volumes" Oct 13 13:22:58 crc kubenswrapper[4684]: W1013 13:22:58.378141 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9f93fef_ae39_4ad8_aa0a_accdca1e5c05.slice/crio-38d4f7671f0e7114ddd9facf812738ffae6a8974ba3e5494dff83c4dd55f2037 WatchSource:0}: Error finding container 38d4f7671f0e7114ddd9facf812738ffae6a8974ba3e5494dff83c4dd55f2037: Status 404 returned error can't find the container with id 38d4f7671f0e7114ddd9facf812738ffae6a8974ba3e5494dff83c4dd55f2037 Oct 13 13:22:58 crc kubenswrapper[4684]: I1013 13:22:58.378492 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 13:22:58 crc kubenswrapper[4684]: I1013 13:22:58.609724 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 13:22:59 crc kubenswrapper[4684]: I1013 13:22:59.182276 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"701957c2-011b-4d9a-84f2-eac3c13116a9","Type":"ContainerStarted","Data":"8b7744a3612ddb1513dfbffbaa4e726511e5aada7e521ef8c3573cfc3cc11a10"} Oct 13 13:22:59 crc kubenswrapper[4684]: I1013 13:22:59.187025 4684 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05","Type":"ContainerStarted","Data":"38d4f7671f0e7114ddd9facf812738ffae6a8974ba3e5494dff83c4dd55f2037"} Oct 13 13:23:00 crc kubenswrapper[4684]: I1013 13:23:00.559523 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:23:00 crc kubenswrapper[4684]: I1013 13:23:00.559640 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:23:00 crc kubenswrapper[4684]: I1013 13:23:00.559685 4684 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:23:00 crc kubenswrapper[4684]: I1013 13:23:00.560531 4684 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5279c7d29a155e7a0bc08fab5ed9b611b1802504a86b780e2e40dd66f636409f"} pod="openshift-machine-config-operator/machine-config-daemon-wns5s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 13:23:00 crc kubenswrapper[4684]: I1013 13:23:00.560620 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" containerID="cri-o://5279c7d29a155e7a0bc08fab5ed9b611b1802504a86b780e2e40dd66f636409f" gracePeriod=600 Oct 13 13:23:01 crc kubenswrapper[4684]: I1013 13:23:01.215007 4684 generic.go:334] "Generic (PLEG): container finished" podID="e54ad64a-6df7-4082-afde-d56463121b3f" containerID="5279c7d29a155e7a0bc08fab5ed9b611b1802504a86b780e2e40dd66f636409f" exitCode=0 Oct 13 13:23:01 crc kubenswrapper[4684]: I1013 13:23:01.215058 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerDied","Data":"5279c7d29a155e7a0bc08fab5ed9b611b1802504a86b780e2e40dd66f636409f"} Oct 13 13:23:01 crc kubenswrapper[4684]: I1013 13:23:01.215095 4684 scope.go:117] "RemoveContainer" containerID="5c52fe2f8e685623bf18216a39b095d01b746cd93787ab68efae18b207ec65af" Oct 13 13:23:02 crc kubenswrapper[4684]: I1013 13:23:02.229700 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05","Type":"ContainerStarted","Data":"32adfde746a20b122633905b5bab0bb38ec81045a83b23bcdd00341c1b5b5871"} Oct 13 13:23:03 crc kubenswrapper[4684]: I1013 13:23:03.241049 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"701957c2-011b-4d9a-84f2-eac3c13116a9","Type":"ContainerStarted","Data":"31197eeaf6643a1705d7830f805391ce694164f3e37dd7cdb0121ea5190a6625"} Oct 13 13:23:03 crc kubenswrapper[4684]: I1013 13:23:03.241957 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-internal-api-0" event={"ID":"701957c2-011b-4d9a-84f2-eac3c13116a9","Type":"ContainerStarted","Data":"4c240f514e90ebbf7e8ebcbea02f6ea90ff973e1f298cd80ac3b53af4537e791"} Oct 13 13:23:03 crc kubenswrapper[4684]: I1013 13:23:03.257880 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" event={"ID":"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b","Type":"ContainerStarted","Data":"69678d1c25a7e21f8963cde39a381700339f394afc1d77ea7f6f8b9e35ebe63e"} Oct 13 13:23:03 crc kubenswrapper[4684]: I1013 13:23:03.258538 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:23:03 crc kubenswrapper[4684]: I1013 13:23:03.264191 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.264178015 podStartE2EDuration="7.264178015s" podCreationTimestamp="2025-10-13 13:22:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:03.261112457 +0000 UTC m=+937.828496527" watchObservedRunningTime="2025-10-13 13:23:03.264178015 +0000 UTC m=+937.831562085" Oct 13 13:23:03 crc kubenswrapper[4684]: I1013 13:23:03.266347 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05","Type":"ContainerStarted","Data":"041767d9ecc7ce885b2f45a237d15587a0892241d6db180371fe776b81638fd5"} Oct 13 13:23:03 crc kubenswrapper[4684]: I1013 13:23:03.283312 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"763ba38e9ba892e0c9f5a9e9f4dafff85d37a0067ef41eb06df8ee48a015f12a"} Oct 13 13:23:03 crc kubenswrapper[4684]: I1013 13:23:03.288138 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" podStartSLOduration=7.288120538 podStartE2EDuration="7.288120538s" podCreationTimestamp="2025-10-13 13:22:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:03.285506845 +0000 UTC m=+937.852890915" watchObservedRunningTime="2025-10-13 13:23:03.288120538 +0000 UTC m=+937.855504608" Oct 13 13:23:03 crc kubenswrapper[4684]: I1013 13:23:03.290511 4684 generic.go:334] "Generic (PLEG): container finished" podID="50465694-fbcb-4c34-8c81-2fcc4ebd43fd" containerID="35b47892ffc3d492ff650a93f630bb8ea010af74ccef7ed53f3a1aef8dcaf08f" exitCode=0 Oct 13 13:23:03 crc kubenswrapper[4684]: I1013 13:23:03.290561 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hrh8g" event={"ID":"50465694-fbcb-4c34-8c81-2fcc4ebd43fd","Type":"ContainerDied","Data":"35b47892ffc3d492ff650a93f630bb8ea010af74ccef7ed53f3a1aef8dcaf08f"} Oct 13 13:23:03 crc kubenswrapper[4684]: I1013 13:23:03.361701 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.361679515 podStartE2EDuration="7.361679515s" podCreationTimestamp="2025-10-13 13:22:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:03.340143118 +0000 UTC m=+937.907527198" 
Oct 13 13:23:03 crc kubenswrapper[4684]: I1013 13:23:03.905715 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 13 13:23:03 crc kubenswrapper[4684]: I1013 13:23:03.978324 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.308711 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" containerName="glance-httpd" containerID="cri-o://041767d9ecc7ce885b2f45a237d15587a0892241d6db180371fe776b81638fd5" gracePeriod=30
Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.308771 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="701957c2-011b-4d9a-84f2-eac3c13116a9" containerName="glance-log" containerID="cri-o://31197eeaf6643a1705d7830f805391ce694164f3e37dd7cdb0121ea5190a6625" gracePeriod=30
Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.308887 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="701957c2-011b-4d9a-84f2-eac3c13116a9" containerName="glance-httpd" containerID="cri-o://4c240f514e90ebbf7e8ebcbea02f6ea90ff973e1f298cd80ac3b53af4537e791" gracePeriod=30
Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.308634 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" containerName="glance-log" containerID="cri-o://32adfde746a20b122633905b5bab0bb38ec81045a83b23bcdd00341c1b5b5871" gracePeriod=30
Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.728355 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-hrh8g"
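The four "Killing container with a grace period" records above (gracePeriod=30) pair with the exit codes reported a second later: a container that handles SIGTERM may exit cleanly within the grace period, and one that does not is force-killed. A minimal Go sketch of that SIGTERM-then-SIGKILL pattern follows; it is illustrative only, since kubelet actually delegates the stop to CRI-O through the CRI.

// Sketch of the grace-period kill pattern (Unix): send SIGTERM, wait up
// to the grace period, then SIGKILL.
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	_ = cmd.Process.Signal(syscall.SIGTERM) // polite request to stop
	select {
	case <-done:
		fmt.Println("exited within grace period")
	case <-time.After(grace):
		_ = cmd.Process.Kill() // SIGKILL once the grace period expires
		<-done
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGrace(cmd, 30*time.Second)
}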
Need to start a new one" pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.821218 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-combined-ca-bundle\") pod \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.821258 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8q2q\" (UniqueName: \"kubernetes.io/projected/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-kube-api-access-q8q2q\") pod \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.821343 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-credential-keys\") pod \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.821427 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-fernet-keys\") pod \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.821471 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-scripts\") pod \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.821505 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-config-data\") pod \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\" (UID: \"50465694-fbcb-4c34-8c81-2fcc4ebd43fd\") " Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.827532 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "50465694-fbcb-4c34-8c81-2fcc4ebd43fd" (UID: "50465694-fbcb-4c34-8c81-2fcc4ebd43fd"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.829140 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-kube-api-access-q8q2q" (OuterVolumeSpecName: "kube-api-access-q8q2q") pod "50465694-fbcb-4c34-8c81-2fcc4ebd43fd" (UID: "50465694-fbcb-4c34-8c81-2fcc4ebd43fd"). InnerVolumeSpecName "kube-api-access-q8q2q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.829250 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "50465694-fbcb-4c34-8c81-2fcc4ebd43fd" (UID: "50465694-fbcb-4c34-8c81-2fcc4ebd43fd"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.830605 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-scripts" (OuterVolumeSpecName: "scripts") pod "50465694-fbcb-4c34-8c81-2fcc4ebd43fd" (UID: "50465694-fbcb-4c34-8c81-2fcc4ebd43fd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.852848 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "50465694-fbcb-4c34-8c81-2fcc4ebd43fd" (UID: "50465694-fbcb-4c34-8c81-2fcc4ebd43fd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.904850 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-config-data" (OuterVolumeSpecName: "config-data") pod "50465694-fbcb-4c34-8c81-2fcc4ebd43fd" (UID: "50465694-fbcb-4c34-8c81-2fcc4ebd43fd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.923029 4684 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.923058 4684 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.923071 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.923081 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.923092 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:05 crc kubenswrapper[4684]: I1013 13:23:05.923105 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8q2q\" (UniqueName: \"kubernetes.io/projected/50465694-fbcb-4c34-8c81-2fcc4ebd43fd-kube-api-access-q8q2q\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.317085 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hrh8g" event={"ID":"50465694-fbcb-4c34-8c81-2fcc4ebd43fd","Type":"ContainerDied","Data":"c7f2b905085349e9e149cd5381070329c52b6123716b6af3469665cac0ed2806"} Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.317109 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-hrh8g" Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.317126 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7f2b905085349e9e149cd5381070329c52b6123716b6af3469665cac0ed2806" Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.319272 4684 generic.go:334] "Generic (PLEG): container finished" podID="701957c2-011b-4d9a-84f2-eac3c13116a9" containerID="4c240f514e90ebbf7e8ebcbea02f6ea90ff973e1f298cd80ac3b53af4537e791" exitCode=0 Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.319287 4684 generic.go:334] "Generic (PLEG): container finished" podID="701957c2-011b-4d9a-84f2-eac3c13116a9" containerID="31197eeaf6643a1705d7830f805391ce694164f3e37dd7cdb0121ea5190a6625" exitCode=143 Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.319314 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"701957c2-011b-4d9a-84f2-eac3c13116a9","Type":"ContainerDied","Data":"4c240f514e90ebbf7e8ebcbea02f6ea90ff973e1f298cd80ac3b53af4537e791"} Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.319328 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"701957c2-011b-4d9a-84f2-eac3c13116a9","Type":"ContainerDied","Data":"31197eeaf6643a1705d7830f805391ce694164f3e37dd7cdb0121ea5190a6625"} Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.364574 4684 generic.go:334] "Generic (PLEG): container finished" podID="b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" containerID="041767d9ecc7ce885b2f45a237d15587a0892241d6db180371fe776b81638fd5" exitCode=0 Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.364611 4684 generic.go:334] "Generic (PLEG): container finished" podID="b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" containerID="32adfde746a20b122633905b5bab0bb38ec81045a83b23bcdd00341c1b5b5871" exitCode=143 Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.371500 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05","Type":"ContainerDied","Data":"041767d9ecc7ce885b2f45a237d15587a0892241d6db180371fe776b81638fd5"} Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.371536 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05","Type":"ContainerDied","Data":"32adfde746a20b122633905b5bab0bb38ec81045a83b23bcdd00341c1b5b5871"} Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.809662 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-hrh8g"] Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.816338 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-hrh8g"] Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.914495 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-77dxj"] Oct 13 13:23:06 crc kubenswrapper[4684]: E1013 13:23:06.915096 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8134a6ec-040a-4f46-9372-08a4bac2ea94" containerName="init" Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.915119 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="8134a6ec-040a-4f46-9372-08a4bac2ea94" containerName="init" Oct 13 13:23:06 crc kubenswrapper[4684]: E1013 13:23:06.915140 4684 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="50465694-fbcb-4c34-8c81-2fcc4ebd43fd" containerName="keystone-bootstrap" Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.915149 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="50465694-fbcb-4c34-8c81-2fcc4ebd43fd" containerName="keystone-bootstrap" Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.915398 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="50465694-fbcb-4c34-8c81-2fcc4ebd43fd" containerName="keystone-bootstrap" Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.915425 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="8134a6ec-040a-4f46-9372-08a4bac2ea94" containerName="init" Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.916288 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-77dxj" Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.919045 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.919272 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.919659 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.919945 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-68s85" Oct 13 13:23:06 crc kubenswrapper[4684]: I1013 13:23:06.927153 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-77dxj"] Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.069293 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-fernet-keys\") pod \"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj" Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.069338 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g6jr\" (UniqueName: \"kubernetes.io/projected/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-kube-api-access-4g6jr\") pod \"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj" Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.069448 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-combined-ca-bundle\") pod \"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj" Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.069482 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-credential-keys\") pod \"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj" Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.069508 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-config-data\") pod 
\"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj" Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.069696 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-scripts\") pod \"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj" Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.171737 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-combined-ca-bundle\") pod \"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj" Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.171797 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-credential-keys\") pod \"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj" Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.171828 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-config-data\") pod \"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj" Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.171870 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-scripts\") pod \"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj" Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.172235 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-fernet-keys\") pod \"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj" Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.172262 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g6jr\" (UniqueName: \"kubernetes.io/projected/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-kube-api-access-4g6jr\") pod \"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj" Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.178088 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-scripts\") pod \"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj" Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.178186 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-credential-keys\") pod \"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj" Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.179544 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-fernet-keys\") pod \"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj"
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.180527 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-config-data\") pod \"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj"
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.189370 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g6jr\" (UniqueName: \"kubernetes.io/projected/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-kube-api-access-4g6jr\") pod \"keystone-bootstrap-77dxj\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") " pod="openstack/keystone-bootstrap-77dxj"
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.234190 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-77dxj"
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.579921 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.679985 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbjr4\" (UniqueName: \"kubernetes.io/projected/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-kube-api-access-hbjr4\") pod \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") "
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.680055 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-httpd-run\") pod \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") "
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.680120 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-combined-ca-bundle\") pod \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") "
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.680199 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") "
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.680221 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-scripts\") pod \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") "
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.680256 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-config-data\") pod \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") "
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.680304 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-logs\") pod \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\" (UID: \"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05\") "
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.680696 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" (UID: "b9f93fef-ae39-4ad8-aa0a-accdca1e5c05"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.680812 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-logs" (OuterVolumeSpecName: "logs") pod "b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" (UID: "b9f93fef-ae39-4ad8-aa0a-accdca1e5c05"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.684758 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-kube-api-access-hbjr4" (OuterVolumeSpecName: "kube-api-access-hbjr4") pod "b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" (UID: "b9f93fef-ae39-4ad8-aa0a-accdca1e5c05"). InnerVolumeSpecName "kube-api-access-hbjr4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.693635 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" (UID: "b9f93fef-ae39-4ad8-aa0a-accdca1e5c05"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.693980 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-scripts" (OuterVolumeSpecName: "scripts") pod "b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" (UID: "b9f93fef-ae39-4ad8-aa0a-accdca1e5c05"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.706051 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" (UID: "b9f93fef-ae39-4ad8-aa0a-accdca1e5c05"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.725990 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-config-data" (OuterVolumeSpecName: "config-data") pod "b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" (UID: "b9f93fef-ae39-4ad8-aa0a-accdca1e5c05"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.782627 4684 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-logs\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.782666 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbjr4\" (UniqueName: \"kubernetes.io/projected/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-kube-api-access-hbjr4\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.782681 4684 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-httpd-run\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.782693 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.782735 4684 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" "
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.782746 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-scripts\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.782757 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.806957 4684 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc"
Oct 13 13:23:07 crc kubenswrapper[4684]: I1013 13:23:07.884108 4684 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.362929 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50465694-fbcb-4c34-8c81-2fcc4ebd43fd" path="/var/lib/kubelet/pods/50465694-fbcb-4c34-8c81-2fcc4ebd43fd/volumes"
Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.381838 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b9f93fef-ae39-4ad8-aa0a-accdca1e5c05","Type":"ContainerDied","Data":"38d4f7671f0e7114ddd9facf812738ffae6a8974ba3e5494dff83c4dd55f2037"}
Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.381947 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.423249 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.440087 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.450964 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 13:23:08 crc kubenswrapper[4684]: E1013 13:23:08.451672 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" containerName="glance-log" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.451695 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" containerName="glance-log" Oct 13 13:23:08 crc kubenswrapper[4684]: E1013 13:23:08.451715 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" containerName="glance-httpd" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.451725 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" containerName="glance-httpd" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.451963 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" containerName="glance-log" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.451980 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" containerName="glance-httpd" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.453487 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.457131 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.457793 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.458969 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.595924 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.596328 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.596374 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ffb05bd-2aa7-44dd-a791-859bf7b47747-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.596418 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ffb05bd-2aa7-44dd-a791-859bf7b47747-logs\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.596449 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-config-data\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.596467 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.596499 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tcjv\" (UniqueName: \"kubernetes.io/projected/2ffb05bd-2aa7-44dd-a791-859bf7b47747-kube-api-access-7tcjv\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.596521 4684 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-scripts\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.697797 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ffb05bd-2aa7-44dd-a791-859bf7b47747-logs\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.697859 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-config-data\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.697886 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.697939 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tcjv\" (UniqueName: \"kubernetes.io/projected/2ffb05bd-2aa7-44dd-a791-859bf7b47747-kube-api-access-7tcjv\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.697963 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-scripts\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.698060 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.698095 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.698144 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ffb05bd-2aa7-44dd-a791-859bf7b47747-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.698594 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/2ffb05bd-2aa7-44dd-a791-859bf7b47747-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.698865 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ffb05bd-2aa7-44dd-a791-859bf7b47747-logs\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.699413 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.706513 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.706565 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-scripts\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.706805 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-config-data\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.713497 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.715911 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tcjv\" (UniqueName: \"kubernetes.io/projected/2ffb05bd-2aa7-44dd-a791-859bf7b47747-kube-api-access-7tcjv\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.723444 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " pod="openstack/glance-default-external-api-0" Oct 13 13:23:08 crc kubenswrapper[4684]: I1013 13:23:08.784721 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.359246 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9f93fef-ae39-4ad8-aa0a-accdca1e5c05" path="/var/lib/kubelet/pods/b9f93fef-ae39-4ad8-aa0a-accdca1e5c05/volumes" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.416551 4684 scope.go:117] "RemoveContainer" containerID="041767d9ecc7ce885b2f45a237d15587a0892241d6db180371fe776b81638fd5" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.485951 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.635069 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"701957c2-011b-4d9a-84f2-eac3c13116a9\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.635147 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xvsk\" (UniqueName: \"kubernetes.io/projected/701957c2-011b-4d9a-84f2-eac3c13116a9-kube-api-access-2xvsk\") pod \"701957c2-011b-4d9a-84f2-eac3c13116a9\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.635241 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/701957c2-011b-4d9a-84f2-eac3c13116a9-logs\") pod \"701957c2-011b-4d9a-84f2-eac3c13116a9\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.635287 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-scripts\") pod \"701957c2-011b-4d9a-84f2-eac3c13116a9\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.635340 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-config-data\") pod \"701957c2-011b-4d9a-84f2-eac3c13116a9\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.635362 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-combined-ca-bundle\") pod \"701957c2-011b-4d9a-84f2-eac3c13116a9\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.635469 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/701957c2-011b-4d9a-84f2-eac3c13116a9-httpd-run\") pod \"701957c2-011b-4d9a-84f2-eac3c13116a9\" (UID: \"701957c2-011b-4d9a-84f2-eac3c13116a9\") " Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.636251 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/701957c2-011b-4d9a-84f2-eac3c13116a9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "701957c2-011b-4d9a-84f2-eac3c13116a9" (UID: "701957c2-011b-4d9a-84f2-eac3c13116a9"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.638812 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/701957c2-011b-4d9a-84f2-eac3c13116a9-logs" (OuterVolumeSpecName: "logs") pod "701957c2-011b-4d9a-84f2-eac3c13116a9" (UID: "701957c2-011b-4d9a-84f2-eac3c13116a9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.644607 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/701957c2-011b-4d9a-84f2-eac3c13116a9-kube-api-access-2xvsk" (OuterVolumeSpecName: "kube-api-access-2xvsk") pod "701957c2-011b-4d9a-84f2-eac3c13116a9" (UID: "701957c2-011b-4d9a-84f2-eac3c13116a9"). InnerVolumeSpecName "kube-api-access-2xvsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.646001 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "701957c2-011b-4d9a-84f2-eac3c13116a9" (UID: "701957c2-011b-4d9a-84f2-eac3c13116a9"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.647522 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-scripts" (OuterVolumeSpecName: "scripts") pod "701957c2-011b-4d9a-84f2-eac3c13116a9" (UID: "701957c2-011b-4d9a-84f2-eac3c13116a9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.667244 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "701957c2-011b-4d9a-84f2-eac3c13116a9" (UID: "701957c2-011b-4d9a-84f2-eac3c13116a9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.689003 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-config-data" (OuterVolumeSpecName: "config-data") pod "701957c2-011b-4d9a-84f2-eac3c13116a9" (UID: "701957c2-011b-4d9a-84f2-eac3c13116a9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.737435 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.737465 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.737474 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/701957c2-011b-4d9a-84f2-eac3c13116a9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.737484 4684 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/701957c2-011b-4d9a-84f2-eac3c13116a9-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.737517 4684 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.737525 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xvsk\" (UniqueName: \"kubernetes.io/projected/701957c2-011b-4d9a-84f2-eac3c13116a9-kube-api-access-2xvsk\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.737534 4684 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/701957c2-011b-4d9a-84f2-eac3c13116a9-logs\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.753885 4684 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Oct 13 13:23:10 crc kubenswrapper[4684]: I1013 13:23:10.839159 4684 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.421537 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.421560 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"701957c2-011b-4d9a-84f2-eac3c13116a9","Type":"ContainerDied","Data":"8b7744a3612ddb1513dfbffbaa4e726511e5aada7e521ef8c3573cfc3cc11a10"} Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.450755 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.458787 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.478489 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 13:23:11 crc kubenswrapper[4684]: E1013 13:23:11.478817 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="701957c2-011b-4d9a-84f2-eac3c13116a9" containerName="glance-httpd" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.478832 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="701957c2-011b-4d9a-84f2-eac3c13116a9" containerName="glance-httpd" Oct 13 13:23:11 crc kubenswrapper[4684]: E1013 13:23:11.478841 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="701957c2-011b-4d9a-84f2-eac3c13116a9" containerName="glance-log" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.478848 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="701957c2-011b-4d9a-84f2-eac3c13116a9" containerName="glance-log" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.479013 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="701957c2-011b-4d9a-84f2-eac3c13116a9" containerName="glance-httpd" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.479023 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="701957c2-011b-4d9a-84f2-eac3c13116a9" containerName="glance-log" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.481010 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.491255 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.491616 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.495816 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.556652 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.556742 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvphh\" (UniqueName: \"kubernetes.io/projected/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-kube-api-access-pvphh\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.556774 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.556854 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-logs\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.556935 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.556985 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.557002 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.557083 4684 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.658559 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-logs\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.658941 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.659003 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.659028 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.659112 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.659142 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.659174 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvphh\" (UniqueName: \"kubernetes.io/projected/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-kube-api-access-pvphh\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.659200 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.660360 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.661601 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-logs\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.662114 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.663133 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.664316 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.665310 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.665494 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.679079 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvphh\" (UniqueName: \"kubernetes.io/projected/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-kube-api-access-pvphh\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.687428 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.751402 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.804965 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.813092 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-747494c85c-8wvj7"] Oct 13 13:23:11 crc kubenswrapper[4684]: I1013 13:23:11.813653 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-747494c85c-8wvj7" podUID="0d3dc831-9857-48cd-818e-cbc797668403" containerName="dnsmasq-dns" containerID="cri-o://2bbdb654d628f0c3d8e14cdd14a20bc78b9939776625b580ad618e23f49dea6d" gracePeriod=10 Oct 13 13:23:12 crc kubenswrapper[4684]: I1013 13:23:12.363045 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="701957c2-011b-4d9a-84f2-eac3c13116a9" path="/var/lib/kubelet/pods/701957c2-011b-4d9a-84f2-eac3c13116a9/volumes" Oct 13 13:23:12 crc kubenswrapper[4684]: I1013 13:23:12.438981 4684 generic.go:334] "Generic (PLEG): container finished" podID="0d3dc831-9857-48cd-818e-cbc797668403" containerID="2bbdb654d628f0c3d8e14cdd14a20bc78b9939776625b580ad618e23f49dea6d" exitCode=0 Oct 13 13:23:12 crc kubenswrapper[4684]: I1013 13:23:12.439026 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-747494c85c-8wvj7" event={"ID":"0d3dc831-9857-48cd-818e-cbc797668403","Type":"ContainerDied","Data":"2bbdb654d628f0c3d8e14cdd14a20bc78b9939776625b580ad618e23f49dea6d"} Oct 13 13:23:14 crc kubenswrapper[4684]: I1013 13:23:14.819423 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-747494c85c-8wvj7" podUID="0d3dc831-9857-48cd-818e-cbc797668403" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: connect: connection refused" Oct 13 13:23:16 crc kubenswrapper[4684]: I1013 13:23:16.469982 4684 generic.go:334] "Generic (PLEG): container finished" podID="89604b0f-1f4f-440b-b779-a3d5fe0c4895" containerID="3ea011a28c8ae539d6f2995a5c117d74dcb59518f0e6b77df5f2f8b8696b1478" exitCode=0 Oct 13 13:23:16 crc kubenswrapper[4684]: I1013 13:23:16.470073 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6b5hs" event={"ID":"89604b0f-1f4f-440b-b779-a3d5fe0c4895","Type":"ContainerDied","Data":"3ea011a28c8ae539d6f2995a5c117d74dcb59518f0e6b77df5f2f8b8696b1478"} Oct 13 13:23:18 crc kubenswrapper[4684]: E1013 13:23:18.786362 4684 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-ceilometer-central:1e4eeec18f8da2b364b39b7a7358aef5" Oct 13 13:23:18 crc kubenswrapper[4684]: E1013 13:23:18.786643 4684 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-ceilometer-central:1e4eeec18f8da2b364b39b7a7358aef5" Oct 13 13:23:18 crc kubenswrapper[4684]: E1013 13:23:18.786778 4684 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-ceilometer-central:1e4eeec18f8da2b364b39b7a7358aef5,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n54h94h59ch57fh56dh87h77h597h55fh678h599h59hfbh66fhfch589h58ch549hcdhb7hd4hfch5ffhf7h57hfdh679h66fh95h9fhc9h64bq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9p2rc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(91117a69-e1f3-4e2d-9973-8c6f758962c2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 13 13:23:19 crc kubenswrapper[4684]: I1013 13:23:19.927179 4684 scope.go:117] "RemoveContainer" containerID="32adfde746a20b122633905b5bab0bb38ec81045a83b23bcdd00341c1b5b5871" Oct 13 13:23:19 crc kubenswrapper[4684]: E1013 13:23:19.942406 4684 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-cinder-api:1e4eeec18f8da2b364b39b7a7358aef5" Oct 13 13:23:19 crc kubenswrapper[4684]: E1013 13:23:19.942470 4684 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-cinder-api:1e4eeec18f8da2b364b39b7a7358aef5" Oct 13 13:23:19 crc kubenswrapper[4684]: E1013 13:23:19.942604 4684 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-cinder-api:1e4eeec18f8da2b364b39b7a7358aef5,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dcblg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-qprxl_openstack(5f3242f8-a0ba-4799-bd79-a0523603fb37): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 13 13:23:19 crc kubenswrapper[4684]: E1013 13:23:19.944347 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-qprxl" podUID="5f3242f8-a0ba-4799-bd79-a0523603fb37" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.104114 4684 scope.go:117] "RemoveContainer" containerID="4c240f514e90ebbf7e8ebcbea02f6ea90ff973e1f298cd80ac3b53af4537e791" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.232433 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-6b5hs" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.259074 4684 scope.go:117] "RemoveContainer" containerID="31197eeaf6643a1705d7830f805391ce694164f3e37dd7cdb0121ea5190a6625" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.264272 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-747494c85c-8wvj7" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.313974 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-ovsdbserver-nb\") pod \"0d3dc831-9857-48cd-818e-cbc797668403\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.314033 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89604b0f-1f4f-440b-b779-a3d5fe0c4895-combined-ca-bundle\") pod \"89604b0f-1f4f-440b-b779-a3d5fe0c4895\" (UID: \"89604b0f-1f4f-440b-b779-a3d5fe0c4895\") " Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.314070 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xw2p\" (UniqueName: \"kubernetes.io/projected/0d3dc831-9857-48cd-818e-cbc797668403-kube-api-access-9xw2p\") pod \"0d3dc831-9857-48cd-818e-cbc797668403\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.314171 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-dns-svc\") pod \"0d3dc831-9857-48cd-818e-cbc797668403\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.314202 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-ovsdbserver-sb\") pod \"0d3dc831-9857-48cd-818e-cbc797668403\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.314234 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-config\") pod \"0d3dc831-9857-48cd-818e-cbc797668403\" (UID: \"0d3dc831-9857-48cd-818e-cbc797668403\") " Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.314381 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpkns\" (UniqueName: \"kubernetes.io/projected/89604b0f-1f4f-440b-b779-a3d5fe0c4895-kube-api-access-gpkns\") pod \"89604b0f-1f4f-440b-b779-a3d5fe0c4895\" (UID: \"89604b0f-1f4f-440b-b779-a3d5fe0c4895\") " Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.314424 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/89604b0f-1f4f-440b-b779-a3d5fe0c4895-config\") pod \"89604b0f-1f4f-440b-b779-a3d5fe0c4895\" (UID: \"89604b0f-1f4f-440b-b779-a3d5fe0c4895\") " Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.340617 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d3dc831-9857-48cd-818e-cbc797668403-kube-api-access-9xw2p" (OuterVolumeSpecName: "kube-api-access-9xw2p") pod "0d3dc831-9857-48cd-818e-cbc797668403" (UID: "0d3dc831-9857-48cd-818e-cbc797668403"). InnerVolumeSpecName "kube-api-access-9xw2p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.344985 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89604b0f-1f4f-440b-b779-a3d5fe0c4895-kube-api-access-gpkns" (OuterVolumeSpecName: "kube-api-access-gpkns") pod "89604b0f-1f4f-440b-b779-a3d5fe0c4895" (UID: "89604b0f-1f4f-440b-b779-a3d5fe0c4895"). InnerVolumeSpecName "kube-api-access-gpkns". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.416709 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xw2p\" (UniqueName: \"kubernetes.io/projected/0d3dc831-9857-48cd-818e-cbc797668403-kube-api-access-9xw2p\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.423772 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpkns\" (UniqueName: \"kubernetes.io/projected/89604b0f-1f4f-440b-b779-a3d5fe0c4895-kube-api-access-gpkns\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.466575 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-77dxj"] Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.466689 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89604b0f-1f4f-440b-b779-a3d5fe0c4895-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "89604b0f-1f4f-440b-b779-a3d5fe0c4895" (UID: "89604b0f-1f4f-440b-b779-a3d5fe0c4895"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.484302 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.491659 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0d3dc831-9857-48cd-818e-cbc797668403" (UID: "0d3dc831-9857-48cd-818e-cbc797668403"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.500887 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0d3dc831-9857-48cd-818e-cbc797668403" (UID: "0d3dc831-9857-48cd-818e-cbc797668403"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.503131 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89604b0f-1f4f-440b-b779-a3d5fe0c4895-config" (OuterVolumeSpecName: "config") pod "89604b0f-1f4f-440b-b779-a3d5fe0c4895" (UID: "89604b0f-1f4f-440b-b779-a3d5fe0c4895"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.504393 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-config" (OuterVolumeSpecName: "config") pod "0d3dc831-9857-48cd-818e-cbc797668403" (UID: "0d3dc831-9857-48cd-818e-cbc797668403"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.524814 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-hjlvf" event={"ID":"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4","Type":"ContainerStarted","Data":"829f092c2099c4972c9b40e38d85b3b258e27eb96fed5f3b889b333dd7427aed"} Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.525662 4684 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.525688 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.525710 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.525720 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/89604b0f-1f4f-440b-b779-a3d5fe0c4895-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.525729 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89604b0f-1f4f-440b-b779-a3d5fe0c4895-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.529720 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6b5hs" event={"ID":"89604b0f-1f4f-440b-b779-a3d5fe0c4895","Type":"ContainerDied","Data":"eb05a645c993a2996dc9f5418d90b45fc97c4920cf94c5eefdff69e57d07fc18"} Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.529766 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb05a645c993a2996dc9f5418d90b45fc97c4920cf94c5eefdff69e57d07fc18" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.529859 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-6b5hs" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.531855 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5qfl5" event={"ID":"8f47ea9a-7261-4645-bcd9-7abf500d9501","Type":"ContainerStarted","Data":"d86bba669b24e0efb4e75b5366b6906169a0aa4f232362b9c8e2f7ddfb6f833c"} Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.535380 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0d3dc831-9857-48cd-818e-cbc797668403" (UID: "0d3dc831-9857-48cd-818e-cbc797668403"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.535949 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-747494c85c-8wvj7" event={"ID":"0d3dc831-9857-48cd-818e-cbc797668403","Type":"ContainerDied","Data":"60157f26235ad84fbd38e0e4b051e3493286263ea496419f0eba580a732feca5"} Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.535985 4684 scope.go:117] "RemoveContainer" containerID="2bbdb654d628f0c3d8e14cdd14a20bc78b9939776625b580ad618e23f49dea6d" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.536065 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-747494c85c-8wvj7" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.552160 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-hjlvf" podStartSLOduration=2.386319055 podStartE2EDuration="26.552138843s" podCreationTimestamp="2025-10-13 13:22:54 +0000 UTC" firstStartedPulling="2025-10-13 13:22:55.718528312 +0000 UTC m=+930.285912382" lastFinishedPulling="2025-10-13 13:23:19.88434809 +0000 UTC m=+954.451732170" observedRunningTime="2025-10-13 13:23:20.545057306 +0000 UTC m=+955.112441376" watchObservedRunningTime="2025-10-13 13:23:20.552138843 +0000 UTC m=+955.119522933" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.563474 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-5qfl5" podStartSLOduration=2.628728887 podStartE2EDuration="26.563453113s" podCreationTimestamp="2025-10-13 13:22:54 +0000 UTC" firstStartedPulling="2025-10-13 13:22:55.994861397 +0000 UTC m=+930.562245467" lastFinishedPulling="2025-10-13 13:23:19.929585623 +0000 UTC m=+954.496969693" observedRunningTime="2025-10-13 13:23:20.560279442 +0000 UTC m=+955.127663512" watchObservedRunningTime="2025-10-13 13:23:20.563453113 +0000 UTC m=+955.130837183" Oct 13 13:23:20 crc kubenswrapper[4684]: E1013 13:23:20.565540 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-cinder-api:1e4eeec18f8da2b364b39b7a7358aef5\\\"\"" pod="openstack/cinder-db-sync-qprxl" podUID="5f3242f8-a0ba-4799-bd79-a0523603fb37" Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.602683 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.609236 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-747494c85c-8wvj7"] Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.614700 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-747494c85c-8wvj7"] Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.627616 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d3dc831-9857-48cd-818e-cbc797668403-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:20 crc kubenswrapper[4684]: W1013 13:23:20.756216 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e4b1e89_804b_413c_8d0d_7f89ce8f715d.slice/crio-9e6cf633fcb92267bae82df47d90c9dca6108ade12a12d8717764fb5dbc0d95d WatchSource:0}: Error finding container 
9e6cf633fcb92267bae82df47d90c9dca6108ade12a12d8717764fb5dbc0d95d: Status 404 returned error can't find the container with id 9e6cf633fcb92267bae82df47d90c9dca6108ade12a12d8717764fb5dbc0d95d Oct 13 13:23:20 crc kubenswrapper[4684]: I1013 13:23:20.777892 4684 scope.go:117] "RemoveContainer" containerID="8a02efc2eecdf385ca3687e8a8aed10bb53cf30451bdd27badfcde46940c5f30" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.608177 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74847f7b97-m7fd7"] Oct 13 13:23:21 crc kubenswrapper[4684]: E1013 13:23:21.627101 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d3dc831-9857-48cd-818e-cbc797668403" containerName="dnsmasq-dns" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.627161 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d3dc831-9857-48cd-818e-cbc797668403" containerName="dnsmasq-dns" Oct 13 13:23:21 crc kubenswrapper[4684]: E1013 13:23:21.627183 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d3dc831-9857-48cd-818e-cbc797668403" containerName="init" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.627190 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d3dc831-9857-48cd-818e-cbc797668403" containerName="init" Oct 13 13:23:21 crc kubenswrapper[4684]: E1013 13:23:21.627202 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89604b0f-1f4f-440b-b779-a3d5fe0c4895" containerName="neutron-db-sync" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.627208 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="89604b0f-1f4f-440b-b779-a3d5fe0c4895" containerName="neutron-db-sync" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.627457 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d3dc831-9857-48cd-818e-cbc797668403" containerName="dnsmasq-dns" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.627474 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="89604b0f-1f4f-440b-b779-a3d5fe0c4895" containerName="neutron-db-sync" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.628395 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74847f7b97-m7fd7"] Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.628483 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74847f7b97-m7fd7"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.646987 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91117a69-e1f3-4e2d-9973-8c6f758962c2","Type":"ContainerStarted","Data":"7e2eecca1b38085002d5b6e5c133f0fdd51295f4948a21b5d40b8c73e1a02ec1"}
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.658583 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6b56fc677b-j59pp"]
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.662160 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e4b1e89-804b-413c-8d0d-7f89ce8f715d","Type":"ContainerStarted","Data":"da779bff9234e8b2b0df6f3e100e87fde96f281ff3439e27f903f2341ab4d3a8"}
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.662202 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e4b1e89-804b-413c-8d0d-7f89ce8f715d","Type":"ContainerStarted","Data":"9e6cf633fcb92267bae82df47d90c9dca6108ade12a12d8717764fb5dbc0d95d"}
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.662271 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6b56fc677b-j59pp"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.669975 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.670147 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.670345 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2ffb05bd-2aa7-44dd-a791-859bf7b47747","Type":"ContainerStarted","Data":"01dfdd6bd7ba8cdb4ac3f7d9c19e722938ed613611b99affe81a6e24097183f6"}
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.670390 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2ffb05bd-2aa7-44dd-a791-859bf7b47747","Type":"ContainerStarted","Data":"b9363b494822683532faf3efccb587c1e81e6eca30fa3a45216b692c2e18ea20"}
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.671457 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-m666p"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.671560 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.675122 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-77dxj" event={"ID":"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54","Type":"ContainerStarted","Data":"b56c2d9ac88be19528c7691e4ec0b8289c36b013eeeaf78f989b9f1886313aa0"}
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.675184 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-77dxj" event={"ID":"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54","Type":"ContainerStarted","Data":"9a9fe7ca00b78c206b0319d08a512365a12c366a7aaff91e35148cdccfca4c30"}
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.706185 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpg2w\" (UniqueName: \"kubernetes.io/projected/02bfc416-ee41-4c39-90ff-7debeec43ce5-kube-api-access-gpg2w\") pod \"neutron-6b56fc677b-j59pp\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " pod="openstack/neutron-6b56fc677b-j59pp"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.706515 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-config\") pod \"neutron-6b56fc677b-j59pp\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " pod="openstack/neutron-6b56fc677b-j59pp"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.706588 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-combined-ca-bundle\") pod \"neutron-6b56fc677b-j59pp\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " pod="openstack/neutron-6b56fc677b-j59pp"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.706632 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-dns-svc\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.706668 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-dns-swift-storage-0\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.706713 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d49jd\" (UniqueName: \"kubernetes.io/projected/74865fc2-5110-4d69-b769-b0c84c802759-kube-api-access-d49jd\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.706772 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-ovndb-tls-certs\") pod \"neutron-6b56fc677b-j59pp\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " pod="openstack/neutron-6b56fc677b-j59pp"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.706805 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-httpd-config\") pod \"neutron-6b56fc677b-j59pp\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " pod="openstack/neutron-6b56fc677b-j59pp"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.706827 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-config\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.706926 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-ovsdbserver-nb\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.706957 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-ovsdbserver-sb\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.723737 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6b56fc677b-j59pp"]
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.734454 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-77dxj" podStartSLOduration=15.734435897000001 podStartE2EDuration="15.734435897s" podCreationTimestamp="2025-10-13 13:23:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:21.708094197 +0000 UTC m=+956.275478277" watchObservedRunningTime="2025-10-13 13:23:21.734435897 +0000 UTC m=+956.301819967"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.810037 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-dns-swift-storage-0\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.810099 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d49jd\" (UniqueName: \"kubernetes.io/projected/74865fc2-5110-4d69-b769-b0c84c802759-kube-api-access-d49jd\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.810134 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-ovndb-tls-certs\") pod \"neutron-6b56fc677b-j59pp\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " pod="openstack/neutron-6b56fc677b-j59pp"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.810161 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-httpd-config\") pod \"neutron-6b56fc677b-j59pp\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " pod="openstack/neutron-6b56fc677b-j59pp"
Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.810180 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-config\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7"
\"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.810234 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-ovsdbserver-sb\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.810277 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpg2w\" (UniqueName: \"kubernetes.io/projected/02bfc416-ee41-4c39-90ff-7debeec43ce5-kube-api-access-gpg2w\") pod \"neutron-6b56fc677b-j59pp\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " pod="openstack/neutron-6b56fc677b-j59pp" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.810304 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-config\") pod \"neutron-6b56fc677b-j59pp\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " pod="openstack/neutron-6b56fc677b-j59pp" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.810361 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-combined-ca-bundle\") pod \"neutron-6b56fc677b-j59pp\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " pod="openstack/neutron-6b56fc677b-j59pp" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.810395 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-dns-svc\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.812241 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-dns-svc\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.812402 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-ovsdbserver-nb\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.812583 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-ovsdbserver-sb\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.813537 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-config\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" Oct 13 13:23:21 crc 
kubenswrapper[4684]: I1013 13:23:21.816654 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-combined-ca-bundle\") pod \"neutron-6b56fc677b-j59pp\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " pod="openstack/neutron-6b56fc677b-j59pp" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.817216 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-dns-swift-storage-0\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.817952 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-config\") pod \"neutron-6b56fc677b-j59pp\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " pod="openstack/neutron-6b56fc677b-j59pp" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.818786 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-ovndb-tls-certs\") pod \"neutron-6b56fc677b-j59pp\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " pod="openstack/neutron-6b56fc677b-j59pp" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.825116 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-httpd-config\") pod \"neutron-6b56fc677b-j59pp\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " pod="openstack/neutron-6b56fc677b-j59pp" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.836938 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpg2w\" (UniqueName: \"kubernetes.io/projected/02bfc416-ee41-4c39-90ff-7debeec43ce5-kube-api-access-gpg2w\") pod \"neutron-6b56fc677b-j59pp\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " pod="openstack/neutron-6b56fc677b-j59pp" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.839959 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d49jd\" (UniqueName: \"kubernetes.io/projected/74865fc2-5110-4d69-b769-b0c84c802759-kube-api-access-d49jd\") pod \"dnsmasq-dns-74847f7b97-m7fd7\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" Oct 13 13:23:21 crc kubenswrapper[4684]: I1013 13:23:21.964494 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" Oct 13 13:23:22 crc kubenswrapper[4684]: I1013 13:23:22.005132 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6b56fc677b-j59pp" Oct 13 13:23:22 crc kubenswrapper[4684]: I1013 13:23:22.367105 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d3dc831-9857-48cd-818e-cbc797668403" path="/var/lib/kubelet/pods/0d3dc831-9857-48cd-818e-cbc797668403/volumes" Oct 13 13:23:22 crc kubenswrapper[4684]: I1013 13:23:22.691618 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e4b1e89-804b-413c-8d0d-7f89ce8f715d","Type":"ContainerStarted","Data":"ee6936771997352ccf4f2df55b3b9633e81b83f856e1065eb37b4c3b1b8ddc03"} Oct 13 13:23:22 crc kubenswrapper[4684]: I1013 13:23:22.698481 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2ffb05bd-2aa7-44dd-a791-859bf7b47747","Type":"ContainerStarted","Data":"f90f04890d5085d43859257a2af92284eedd52346c1b2550a194f0d9b964f609"} Oct 13 13:23:22 crc kubenswrapper[4684]: I1013 13:23:22.737229 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74847f7b97-m7fd7"] Oct 13 13:23:22 crc kubenswrapper[4684]: I1013 13:23:22.749525 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=11.749504007 podStartE2EDuration="11.749504007s" podCreationTimestamp="2025-10-13 13:23:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:22.718887811 +0000 UTC m=+957.286271881" watchObservedRunningTime="2025-10-13 13:23:22.749504007 +0000 UTC m=+957.316888087" Oct 13 13:23:22 crc kubenswrapper[4684]: I1013 13:23:22.775284 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6b56fc677b-j59pp"] Oct 13 13:23:22 crc kubenswrapper[4684]: I1013 13:23:22.784472 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=14.784454252 podStartE2EDuration="14.784454252s" podCreationTimestamp="2025-10-13 13:23:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:22.766295353 +0000 UTC m=+957.333679423" watchObservedRunningTime="2025-10-13 13:23:22.784454252 +0000 UTC m=+957.351838322" Oct 13 13:23:23 crc kubenswrapper[4684]: I1013 13:23:23.707290 4684 generic.go:334] "Generic (PLEG): container finished" podID="74865fc2-5110-4d69-b769-b0c84c802759" containerID="87134103cd23a720ea011361a3a0fbb826bbc5c1934cb01395c373fb63c27139" exitCode=0 Oct 13 13:23:23 crc kubenswrapper[4684]: I1013 13:23:23.707442 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" event={"ID":"74865fc2-5110-4d69-b769-b0c84c802759","Type":"ContainerDied","Data":"87134103cd23a720ea011361a3a0fbb826bbc5c1934cb01395c373fb63c27139"} Oct 13 13:23:23 crc kubenswrapper[4684]: I1013 13:23:23.707818 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" event={"ID":"74865fc2-5110-4d69-b769-b0c84c802759","Type":"ContainerStarted","Data":"06ac7a6d9a1c490e2aa9ddf23d2773cb0e2a686c3dcab4216d2d8327107f9da0"} Oct 13 13:23:23 crc kubenswrapper[4684]: I1013 13:23:23.715390 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b56fc677b-j59pp" 
event={"ID":"02bfc416-ee41-4c39-90ff-7debeec43ce5","Type":"ContainerStarted","Data":"2ae7c125aa1f81711b939cbbc2db105dada3d6a3ec0bdff28919cad1b29460ba"} Oct 13 13:23:23 crc kubenswrapper[4684]: I1013 13:23:23.715428 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b56fc677b-j59pp" event={"ID":"02bfc416-ee41-4c39-90ff-7debeec43ce5","Type":"ContainerStarted","Data":"bf995c528a1c22e147769fd50ed0a26a4133aebe7434a123ce279ce5a7e89f9f"} Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.321820 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-594469df77-tzkgb"] Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.323333 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.328871 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.335049 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.409252 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-594469df77-tzkgb"] Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.493487 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-httpd-config\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.493607 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-internal-tls-certs\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.493656 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-ovndb-tls-certs\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.493841 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-combined-ca-bundle\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.493859 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-public-tls-certs\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.493945 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-config\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.493996 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bcqj\" (UniqueName: \"kubernetes.io/projected/c1c77198-2e43-4230-a37d-48e65e09f466-kube-api-access-6bcqj\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.595430 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-internal-tls-certs\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.595486 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-ovndb-tls-certs\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.595564 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-combined-ca-bundle\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.595582 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-public-tls-certs\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.595613 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-config\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.595643 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bcqj\" (UniqueName: \"kubernetes.io/projected/c1c77198-2e43-4230-a37d-48e65e09f466-kube-api-access-6bcqj\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.595670 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-httpd-config\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.601440 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-public-tls-certs\") pod 
\"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.601939 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-internal-tls-certs\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.604140 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-combined-ca-bundle\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.604268 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-httpd-config\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.608577 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-config\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.610464 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1c77198-2e43-4230-a37d-48e65e09f466-ovndb-tls-certs\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.622500 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bcqj\" (UniqueName: \"kubernetes.io/projected/c1c77198-2e43-4230-a37d-48e65e09f466-kube-api-access-6bcqj\") pod \"neutron-594469df77-tzkgb\" (UID: \"c1c77198-2e43-4230-a37d-48e65e09f466\") " pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.669833 4684 util.go:30] "No sandbox for pod can be found. 
Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.669833 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-594469df77-tzkgb"
Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.725320 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" event={"ID":"74865fc2-5110-4d69-b769-b0c84c802759","Type":"ContainerStarted","Data":"cbba4fc2a567d014886b43b4ee5acef1da442aa8c084cfcf09440a8a2f3eb9ff"}
Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.725447 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74847f7b97-m7fd7"
Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.727611 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b56fc677b-j59pp" event={"ID":"02bfc416-ee41-4c39-90ff-7debeec43ce5","Type":"ContainerStarted","Data":"3a538c440199f1d6b939bee45f3df3d443fd3b6c29e047035086bbfa7bfbc86c"}
Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.728106 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6b56fc677b-j59pp"
Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.733535 4684 generic.go:334] "Generic (PLEG): container finished" podID="6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4" containerID="829f092c2099c4972c9b40e38d85b3b258e27eb96fed5f3b889b333dd7427aed" exitCode=0
Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.733571 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-hjlvf" event={"ID":"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4","Type":"ContainerDied","Data":"829f092c2099c4972c9b40e38d85b3b258e27eb96fed5f3b889b333dd7427aed"}
Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.751269 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" podStartSLOduration=3.751253912 podStartE2EDuration="3.751253912s" podCreationTimestamp="2025-10-13 13:23:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:24.747867964 +0000 UTC m=+959.315252034" watchObservedRunningTime="2025-10-13 13:23:24.751253912 +0000 UTC m=+959.318637982"
Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.794978 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6b56fc677b-j59pp" podStartSLOduration=3.794954626 podStartE2EDuration="3.794954626s" podCreationTimestamp="2025-10-13 13:23:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:24.783424208 +0000 UTC m=+959.350808278" watchObservedRunningTime="2025-10-13 13:23:24.794954626 +0000 UTC m=+959.362338706"
Oct 13 13:23:24 crc kubenswrapper[4684]: I1013 13:23:24.818609 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-747494c85c-8wvj7" podUID="0d3dc831-9857-48cd-818e-cbc797668403" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: i/o timeout"
Oct 13 13:23:25 crc kubenswrapper[4684]: I1013 13:23:25.744528 4684 generic.go:334] "Generic (PLEG): container finished" podID="8f47ea9a-7261-4645-bcd9-7abf500d9501" containerID="d86bba669b24e0efb4e75b5366b6906169a0aa4f232362b9c8e2f7ddfb6f833c" exitCode=0
Oct 13 13:23:25 crc kubenswrapper[4684]: I1013 13:23:25.744665 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5qfl5" event={"ID":"8f47ea9a-7261-4645-bcd9-7abf500d9501","Type":"ContainerDied","Data":"d86bba669b24e0efb4e75b5366b6906169a0aa4f232362b9c8e2f7ddfb6f833c"}
Oct 13 13:23:25 crc kubenswrapper[4684]: I1013 13:23:25.748152 4684 generic.go:334] "Generic (PLEG): container finished" podID="2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54" containerID="b56c2d9ac88be19528c7691e4ec0b8289c36b013eeeaf78f989b9f1886313aa0" exitCode=0
Oct 13 13:23:25 crc kubenswrapper[4684]: I1013 13:23:25.748273 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-77dxj" event={"ID":"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54","Type":"ContainerDied","Data":"b56c2d9ac88be19528c7691e4ec0b8289c36b013eeeaf78f989b9f1886313aa0"}
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.011695 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-hjlvf"
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.030964 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-77dxj"
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.076189 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-5qfl5"
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.159193 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-config-data\") pod \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") "
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.159236 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f47ea9a-7261-4645-bcd9-7abf500d9501-combined-ca-bundle\") pod \"8f47ea9a-7261-4645-bcd9-7abf500d9501\" (UID: \"8f47ea9a-7261-4645-bcd9-7abf500d9501\") "
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.159271 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-fernet-keys\") pod \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") "
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.159289 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-scripts\") pod \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") "
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.159343 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4g6jr\" (UniqueName: \"kubernetes.io/projected/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-kube-api-access-4g6jr\") pod \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") "
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.159364 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-logs\") pod \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") "
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.159387 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-combined-ca-bundle\") pod \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") "
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.159436 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ds5x\" (UniqueName: \"kubernetes.io/projected/8f47ea9a-7261-4645-bcd9-7abf500d9501-kube-api-access-2ds5x\") pod \"8f47ea9a-7261-4645-bcd9-7abf500d9501\" (UID: \"8f47ea9a-7261-4645-bcd9-7abf500d9501\") "
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.159458 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-scripts\") pod \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") "
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.159486 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-combined-ca-bundle\") pod \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") "
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.159512 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-config-data\") pod \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") "
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.159527 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cs4z8\" (UniqueName: \"kubernetes.io/projected/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-kube-api-access-cs4z8\") pod \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\" (UID: \"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4\") "
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.159567 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8f47ea9a-7261-4645-bcd9-7abf500d9501-db-sync-config-data\") pod \"8f47ea9a-7261-4645-bcd9-7abf500d9501\" (UID: \"8f47ea9a-7261-4645-bcd9-7abf500d9501\") "
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.159619 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-credential-keys\") pod \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\" (UID: \"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54\") "
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.160442 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-logs" (OuterVolumeSpecName: "logs") pod "6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4" (UID: "6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.164270 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f47ea9a-7261-4645-bcd9-7abf500d9501-kube-api-access-2ds5x" (OuterVolumeSpecName: "kube-api-access-2ds5x") pod "8f47ea9a-7261-4645-bcd9-7abf500d9501" (UID: "8f47ea9a-7261-4645-bcd9-7abf500d9501"). InnerVolumeSpecName "kube-api-access-2ds5x". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.164324 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-kube-api-access-cs4z8" (OuterVolumeSpecName: "kube-api-access-cs4z8") pod "6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4" (UID: "6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4"). InnerVolumeSpecName "kube-api-access-cs4z8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.164345 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f47ea9a-7261-4645-bcd9-7abf500d9501-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "8f47ea9a-7261-4645-bcd9-7abf500d9501" (UID: "8f47ea9a-7261-4645-bcd9-7abf500d9501"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.165477 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-kube-api-access-4g6jr" (OuterVolumeSpecName: "kube-api-access-4g6jr") pod "2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54" (UID: "2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54"). InnerVolumeSpecName "kube-api-access-4g6jr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.165972 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54" (UID: "2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.166279 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-scripts" (OuterVolumeSpecName: "scripts") pod "6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4" (UID: "6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.167065 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-scripts" (OuterVolumeSpecName: "scripts") pod "2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54" (UID: "2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.167420 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54" (UID: "2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.184447 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-config-data" (OuterVolumeSpecName: "config-data") pod "6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4" (UID: "6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.188676 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54" (UID: "2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.190822 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4" (UID: "6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.198028 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-config-data" (OuterVolumeSpecName: "config-data") pod "2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54" (UID: "2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.205027 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f47ea9a-7261-4645-bcd9-7abf500d9501-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8f47ea9a-7261-4645-bcd9-7abf500d9501" (UID: "8f47ea9a-7261-4645-bcd9-7abf500d9501"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.261265 4684 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.261302 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.261316 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f47ea9a-7261-4645-bcd9-7abf500d9501-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.261329 4684 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.261341 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.261352 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4g6jr\" (UniqueName: \"kubernetes.io/projected/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-kube-api-access-4g6jr\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.261366 4684 reconciler_common.go:293] "Volume detached for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-logs\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.261377 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.261389 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ds5x\" (UniqueName: \"kubernetes.io/projected/8f47ea9a-7261-4645-bcd9-7abf500d9501-kube-api-access-2ds5x\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.261401 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.261413 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.261424 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.261436 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cs4z8\" (UniqueName: \"kubernetes.io/projected/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4-kube-api-access-cs4z8\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.261448 4684 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8f47ea9a-7261-4645-bcd9-7abf500d9501-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:28 crc kubenswrapper[4684]: W1013 13:23:28.344371 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1c77198_2e43_4230_a37d_48e65e09f466.slice/crio-7822b7908f8633a28559abd89d02daf72ffe07f09e9dc04ef0607bb58931a797 WatchSource:0}: Error finding container 7822b7908f8633a28559abd89d02daf72ffe07f09e9dc04ef0607bb58931a797: Status 404 returned error can't find the container with id 7822b7908f8633a28559abd89d02daf72ffe07f09e9dc04ef0607bb58931a797 Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.345622 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-594469df77-tzkgb"] Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.779876 4684 util.go:48] "No ready sandbox for pod can be found. 
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.779876 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-hjlvf"
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.779877 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-hjlvf" event={"ID":"6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4","Type":"ContainerDied","Data":"919e430d58a29b15747dffe243f8ca37eff3217398f422e41e895d242cff742f"}
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.780287 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="919e430d58a29b15747dffe243f8ca37eff3217398f422e41e895d242cff742f"
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.782467 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-594469df77-tzkgb" event={"ID":"c1c77198-2e43-4230-a37d-48e65e09f466","Type":"ContainerStarted","Data":"7487ef9af632f00f1dd8f386abaf75cd766da2543a30dac17c3c3df78afa09cd"}
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.782509 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-594469df77-tzkgb" event={"ID":"c1c77198-2e43-4230-a37d-48e65e09f466","Type":"ContainerStarted","Data":"911f4470951a5f577cea0d0b74f499ddac695822f7d9f9b8d1d4257a8f7f6e39"}
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.782522 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-594469df77-tzkgb" event={"ID":"c1c77198-2e43-4230-a37d-48e65e09f466","Type":"ContainerStarted","Data":"7822b7908f8633a28559abd89d02daf72ffe07f09e9dc04ef0607bb58931a797"}
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.782564 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-594469df77-tzkgb"
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.784942 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.784991 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5qfl5" event={"ID":"8f47ea9a-7261-4645-bcd9-7abf500d9501","Type":"ContainerDied","Data":"5be6ec84215d1c46a6cb61120462ce5c2e254b99bec751cb295fe0324f233154"}
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.785015 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5be6ec84215d1c46a6cb61120462ce5c2e254b99bec751cb295fe0324f233154"
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.785030 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.784976 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-5qfl5"
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.787319 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-77dxj"
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.787479 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-77dxj" event={"ID":"2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54","Type":"ContainerDied","Data":"9a9fe7ca00b78c206b0319d08a512365a12c366a7aaff91e35148cdccfca4c30"}
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.787514 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a9fe7ca00b78c206b0319d08a512365a12c366a7aaff91e35148cdccfca4c30"
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.790035 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91117a69-e1f3-4e2d-9973-8c6f758962c2","Type":"ContainerStarted","Data":"d4bd186f939643da0e99cbfd16ff7fd4629712142ca109785472dfd69109958d"}
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.829075 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-594469df77-tzkgb" podStartSLOduration=4.829057282 podStartE2EDuration="4.829057282s" podCreationTimestamp="2025-10-13 13:23:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:28.804518239 +0000 UTC m=+963.371902329" watchObservedRunningTime="2025-10-13 13:23:28.829057282 +0000 UTC m=+963.396441352"
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.841878 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 13 13:23:28 crc kubenswrapper[4684]: I1013 13:23:28.850546 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.130670 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5854cb58cb-4hhrx"]
Oct 13 13:23:29 crc kubenswrapper[4684]: E1013 13:23:29.131009 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4" containerName="placement-db-sync"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.131022 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4" containerName="placement-db-sync"
Oct 13 13:23:29 crc kubenswrapper[4684]: E1013 13:23:29.131039 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54" containerName="keystone-bootstrap"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.131045 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54" containerName="keystone-bootstrap"
Oct 13 13:23:29 crc kubenswrapper[4684]: E1013 13:23:29.131061 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f47ea9a-7261-4645-bcd9-7abf500d9501" containerName="barbican-db-sync"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.131067 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f47ea9a-7261-4645-bcd9-7abf500d9501" containerName="barbican-db-sync"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.131243 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f47ea9a-7261-4645-bcd9-7abf500d9501" containerName="barbican-db-sync"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.131263 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54" containerName="keystone-bootstrap"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.131283 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4" containerName="placement-db-sync"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.132325 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5854cb58cb-4hhrx"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.139200 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-d66tw"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.139330 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.139365 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.139336 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.139574 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.140791 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5854cb58cb-4hhrx"]
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.228371 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5c87fbbfdb-hnmd8"]
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.229391 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5c87fbbfdb-hnmd8"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.231447 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.231722 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-68s85"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.231861 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.231873 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.232408 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.233077 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.254332 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5c87fbbfdb-hnmd8"]
Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.280124 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-combined-ca-bundle\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx"
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-internal-tls-certs\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.280307 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-public-tls-certs\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.280345 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-scripts\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.280393 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n5kg\" (UniqueName: \"kubernetes.io/projected/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-kube-api-access-8n5kg\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.280421 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-logs\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.280484 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-config-data\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.383270 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrx4p\" (UniqueName: \"kubernetes.io/projected/6b42944c-2de0-47a6-bdb1-70750adb4c3c-kube-api-access-xrx4p\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.383392 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-public-tls-certs\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.383441 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-combined-ca-bundle\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.383473 4684 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-internal-tls-certs\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.383526 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-credential-keys\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.383574 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-scripts\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.383633 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-combined-ca-bundle\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.383670 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-public-tls-certs\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.383699 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-scripts\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.383738 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-config-data\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.383772 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n5kg\" (UniqueName: \"kubernetes.io/projected/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-kube-api-access-8n5kg\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.383803 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-fernet-keys\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.383842 4684 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-logs\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.384725 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-config-data\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.384786 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-internal-tls-certs\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.392171 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6c9b968d8f-c6bhw"] Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.394696 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.396344 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-logs\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.397191 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-combined-ca-bundle\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.398563 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.399542 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.399884 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-tnbh2" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.407539 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-internal-tls-certs\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.427809 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-746497c59d-h7fpp"] Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.428531 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-scripts\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 
13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.431446 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.433835 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.435156 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-config-data\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.436737 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-public-tls-certs\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.452255 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6c9b968d8f-c6bhw"] Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.464969 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n5kg\" (UniqueName: \"kubernetes.io/projected/a8cfa6a6-f09c-4903-a8ce-df37542f7fd2-kube-api-access-8n5kg\") pod \"placement-5854cb58cb-4hhrx\" (UID: \"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2\") " pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.486030 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-746497c59d-h7fpp"] Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489281 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-public-tls-certs\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489321 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6vm9\" (UniqueName: \"kubernetes.io/projected/290ff522-789b-4ba3-90d4-2047bf14a6de-kube-api-access-z6vm9\") pod \"barbican-worker-6c9b968d8f-c6bhw\" (UID: \"290ff522-789b-4ba3-90d4-2047bf14a6de\") " pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489344 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/290ff522-789b-4ba3-90d4-2047bf14a6de-combined-ca-bundle\") pod \"barbican-worker-6c9b968d8f-c6bhw\" (UID: \"290ff522-789b-4ba3-90d4-2047bf14a6de\") " pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489364 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3b2549e-a0de-4650-95fb-c3b8c8998664-combined-ca-bundle\") pod \"barbican-keystone-listener-746497c59d-h7fpp\" (UID: \"b3b2549e-a0de-4650-95fb-c3b8c8998664\") " pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 
crc kubenswrapper[4684]: I1013 13:23:29.489384 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-credential-keys\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489410 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-scripts\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489427 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3b2549e-a0de-4650-95fb-c3b8c8998664-config-data\") pod \"barbican-keystone-listener-746497c59d-h7fpp\" (UID: \"b3b2549e-a0de-4650-95fb-c3b8c8998664\") " pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489448 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/290ff522-789b-4ba3-90d4-2047bf14a6de-config-data-custom\") pod \"barbican-worker-6c9b968d8f-c6bhw\" (UID: \"290ff522-789b-4ba3-90d4-2047bf14a6de\") " pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489466 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b3b2549e-a0de-4650-95fb-c3b8c8998664-config-data-custom\") pod \"barbican-keystone-listener-746497c59d-h7fpp\" (UID: \"b3b2549e-a0de-4650-95fb-c3b8c8998664\") " pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489485 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/290ff522-789b-4ba3-90d4-2047bf14a6de-logs\") pod \"barbican-worker-6c9b968d8f-c6bhw\" (UID: \"290ff522-789b-4ba3-90d4-2047bf14a6de\") " pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489513 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-combined-ca-bundle\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489533 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-config-data\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489555 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-fernet-keys\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " 
pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489575 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3b2549e-a0de-4650-95fb-c3b8c8998664-logs\") pod \"barbican-keystone-listener-746497c59d-h7fpp\" (UID: \"b3b2549e-a0de-4650-95fb-c3b8c8998664\") " pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489614 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-internal-tls-certs\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489655 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/290ff522-789b-4ba3-90d4-2047bf14a6de-config-data\") pod \"barbican-worker-6c9b968d8f-c6bhw\" (UID: \"290ff522-789b-4ba3-90d4-2047bf14a6de\") " pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489678 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrx4p\" (UniqueName: \"kubernetes.io/projected/6b42944c-2de0-47a6-bdb1-70750adb4c3c-kube-api-access-xrx4p\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.489707 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x87v\" (UniqueName: \"kubernetes.io/projected/b3b2549e-a0de-4650-95fb-c3b8c8998664-kube-api-access-2x87v\") pod \"barbican-keystone-listener-746497c59d-h7fpp\" (UID: \"b3b2549e-a0de-4650-95fb-c3b8c8998664\") " pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.500929 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-credential-keys\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.508877 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-fernet-keys\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.516737 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-public-tls-certs\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.517289 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-combined-ca-bundle\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: 
\"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.518297 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-scripts\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.521046 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-config-data\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.523232 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b42944c-2de0-47a6-bdb1-70750adb4c3c-internal-tls-certs\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.527341 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrx4p\" (UniqueName: \"kubernetes.io/projected/6b42944c-2de0-47a6-bdb1-70750adb4c3c-kube-api-access-xrx4p\") pod \"keystone-5c87fbbfdb-hnmd8\" (UID: \"6b42944c-2de0-47a6-bdb1-70750adb4c3c\") " pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.533056 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74847f7b97-m7fd7"] Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.533314 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" podUID="74865fc2-5110-4d69-b769-b0c84c802759" containerName="dnsmasq-dns" containerID="cri-o://cbba4fc2a567d014886b43b4ee5acef1da442aa8c084cfcf09440a8a2f3eb9ff" gracePeriod=10 Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.536707 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.551499 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.572965 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b5dbf95fc-22v5n"] Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.574793 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.588579 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b5dbf95fc-22v5n"] Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.600168 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3b2549e-a0de-4650-95fb-c3b8c8998664-logs\") pod \"barbican-keystone-listener-746497c59d-h7fpp\" (UID: \"b3b2549e-a0de-4650-95fb-c3b8c8998664\") " pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.600281 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/290ff522-789b-4ba3-90d4-2047bf14a6de-config-data\") pod \"barbican-worker-6c9b968d8f-c6bhw\" (UID: \"290ff522-789b-4ba3-90d4-2047bf14a6de\") " pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.600336 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x87v\" (UniqueName: \"kubernetes.io/projected/b3b2549e-a0de-4650-95fb-c3b8c8998664-kube-api-access-2x87v\") pod \"barbican-keystone-listener-746497c59d-h7fpp\" (UID: \"b3b2549e-a0de-4650-95fb-c3b8c8998664\") " pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.600372 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6vm9\" (UniqueName: \"kubernetes.io/projected/290ff522-789b-4ba3-90d4-2047bf14a6de-kube-api-access-z6vm9\") pod \"barbican-worker-6c9b968d8f-c6bhw\" (UID: \"290ff522-789b-4ba3-90d4-2047bf14a6de\") " pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.600390 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/290ff522-789b-4ba3-90d4-2047bf14a6de-combined-ca-bundle\") pod \"barbican-worker-6c9b968d8f-c6bhw\" (UID: \"290ff522-789b-4ba3-90d4-2047bf14a6de\") " pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.600409 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3b2549e-a0de-4650-95fb-c3b8c8998664-combined-ca-bundle\") pod \"barbican-keystone-listener-746497c59d-h7fpp\" (UID: \"b3b2549e-a0de-4650-95fb-c3b8c8998664\") " pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.600440 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3b2549e-a0de-4650-95fb-c3b8c8998664-config-data\") pod \"barbican-keystone-listener-746497c59d-h7fpp\" (UID: \"b3b2549e-a0de-4650-95fb-c3b8c8998664\") " pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.600458 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/290ff522-789b-4ba3-90d4-2047bf14a6de-config-data-custom\") pod \"barbican-worker-6c9b968d8f-c6bhw\" (UID: \"290ff522-789b-4ba3-90d4-2047bf14a6de\") " pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 
13:23:29.600476 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b3b2549e-a0de-4650-95fb-c3b8c8998664-config-data-custom\") pod \"barbican-keystone-listener-746497c59d-h7fpp\" (UID: \"b3b2549e-a0de-4650-95fb-c3b8c8998664\") " pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.600495 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/290ff522-789b-4ba3-90d4-2047bf14a6de-logs\") pod \"barbican-worker-6c9b968d8f-c6bhw\" (UID: \"290ff522-789b-4ba3-90d4-2047bf14a6de\") " pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.601157 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/290ff522-789b-4ba3-90d4-2047bf14a6de-logs\") pod \"barbican-worker-6c9b968d8f-c6bhw\" (UID: \"290ff522-789b-4ba3-90d4-2047bf14a6de\") " pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.601600 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3b2549e-a0de-4650-95fb-c3b8c8998664-logs\") pod \"barbican-keystone-listener-746497c59d-h7fpp\" (UID: \"b3b2549e-a0de-4650-95fb-c3b8c8998664\") " pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.607805 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3b2549e-a0de-4650-95fb-c3b8c8998664-combined-ca-bundle\") pod \"barbican-keystone-listener-746497c59d-h7fpp\" (UID: \"b3b2549e-a0de-4650-95fb-c3b8c8998664\") " pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.616658 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/290ff522-789b-4ba3-90d4-2047bf14a6de-config-data-custom\") pod \"barbican-worker-6c9b968d8f-c6bhw\" (UID: \"290ff522-789b-4ba3-90d4-2047bf14a6de\") " pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.619915 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/290ff522-789b-4ba3-90d4-2047bf14a6de-config-data\") pod \"barbican-worker-6c9b968d8f-c6bhw\" (UID: \"290ff522-789b-4ba3-90d4-2047bf14a6de\") " pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.623890 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/290ff522-789b-4ba3-90d4-2047bf14a6de-combined-ca-bundle\") pod \"barbican-worker-6c9b968d8f-c6bhw\" (UID: \"290ff522-789b-4ba3-90d4-2047bf14a6de\") " pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.632852 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-58b68df49d-vntzm"] Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.638199 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b3b2549e-a0de-4650-95fb-c3b8c8998664-config-data-custom\") pod 
\"barbican-keystone-listener-746497c59d-h7fpp\" (UID: \"b3b2549e-a0de-4650-95fb-c3b8c8998664\") " pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.638292 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-58b68df49d-vntzm"] Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.639127 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.640710 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3b2549e-a0de-4650-95fb-c3b8c8998664-config-data\") pod \"barbican-keystone-listener-746497c59d-h7fpp\" (UID: \"b3b2549e-a0de-4650-95fb-c3b8c8998664\") " pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.642488 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6vm9\" (UniqueName: \"kubernetes.io/projected/290ff522-789b-4ba3-90d4-2047bf14a6de-kube-api-access-z6vm9\") pod \"barbican-worker-6c9b968d8f-c6bhw\" (UID: \"290ff522-789b-4ba3-90d4-2047bf14a6de\") " pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.642707 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.658109 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x87v\" (UniqueName: \"kubernetes.io/projected/b3b2549e-a0de-4650-95fb-c3b8c8998664-kube-api-access-2x87v\") pod \"barbican-keystone-listener-746497c59d-h7fpp\" (UID: \"b3b2549e-a0de-4650-95fb-c3b8c8998664\") " pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.677817 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6c9b968d8f-c6bhw" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.678249 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.702773 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-ovsdbserver-nb\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.702975 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-dns-svc\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.703091 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-ovsdbserver-sb\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.703160 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-config-data\") pod \"barbican-api-58b68df49d-vntzm\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.703242 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-config\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.703312 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-dns-swift-storage-0\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.703396 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-combined-ca-bundle\") pod \"barbican-api-58b68df49d-vntzm\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.703475 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xn9df\" (UniqueName: \"kubernetes.io/projected/3675d011-f8eb-4e9a-9000-85e976ed2ee7-kube-api-access-xn9df\") pod \"barbican-api-58b68df49d-vntzm\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.703563 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/3675d011-f8eb-4e9a-9000-85e976ed2ee7-logs\") pod \"barbican-api-58b68df49d-vntzm\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.703686 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gzgr\" (UniqueName: \"kubernetes.io/projected/a6ba537f-3303-4324-b704-22ca556a0f30-kube-api-access-6gzgr\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.703753 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-config-data-custom\") pod \"barbican-api-58b68df49d-vntzm\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.753138 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.804866 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-ovsdbserver-nb\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.804959 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-dns-svc\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.804995 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-ovsdbserver-sb\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.805014 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-config-data\") pod \"barbican-api-58b68df49d-vntzm\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.805041 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-config\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.805058 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-dns-swift-storage-0\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc 
kubenswrapper[4684]: I1013 13:23:29.805074 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-combined-ca-bundle\") pod \"barbican-api-58b68df49d-vntzm\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.805108 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xn9df\" (UniqueName: \"kubernetes.io/projected/3675d011-f8eb-4e9a-9000-85e976ed2ee7-kube-api-access-xn9df\") pod \"barbican-api-58b68df49d-vntzm\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.805142 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3675d011-f8eb-4e9a-9000-85e976ed2ee7-logs\") pod \"barbican-api-58b68df49d-vntzm\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.805221 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-config-data-custom\") pod \"barbican-api-58b68df49d-vntzm\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.805237 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gzgr\" (UniqueName: \"kubernetes.io/projected/a6ba537f-3303-4324-b704-22ca556a0f30-kube-api-access-6gzgr\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.806159 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-ovsdbserver-nb\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.806952 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3675d011-f8eb-4e9a-9000-85e976ed2ee7-logs\") pod \"barbican-api-58b68df49d-vntzm\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.807731 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-ovsdbserver-sb\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.808119 4684 generic.go:334] "Generic (PLEG): container finished" podID="74865fc2-5110-4d69-b769-b0c84c802759" containerID="cbba4fc2a567d014886b43b4ee5acef1da442aa8c084cfcf09440a8a2f3eb9ff" exitCode=0 Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.808781 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" 
event={"ID":"74865fc2-5110-4d69-b769-b0c84c802759","Type":"ContainerDied","Data":"cbba4fc2a567d014886b43b4ee5acef1da442aa8c084cfcf09440a8a2f3eb9ff"} Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.809610 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-dns-swift-storage-0\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.810322 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-config\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.811061 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.811105 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.818719 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-config-data\") pod \"barbican-api-58b68df49d-vntzm\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.822384 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-combined-ca-bundle\") pod \"barbican-api-58b68df49d-vntzm\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.823438 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-config-data-custom\") pod \"barbican-api-58b68df49d-vntzm\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.824046 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xn9df\" (UniqueName: \"kubernetes.io/projected/3675d011-f8eb-4e9a-9000-85e976ed2ee7-kube-api-access-xn9df\") pod \"barbican-api-58b68df49d-vntzm\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.824559 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-dns-svc\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.828957 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gzgr\" (UniqueName: \"kubernetes.io/projected/a6ba537f-3303-4324-b704-22ca556a0f30-kube-api-access-6gzgr\") pod \"dnsmasq-dns-6b5dbf95fc-22v5n\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " 
pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:29 crc kubenswrapper[4684]: I1013 13:23:29.997227 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.007269 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.206931 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.225744 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5c87fbbfdb-hnmd8"] Oct 13 13:23:30 crc kubenswrapper[4684]: W1013 13:23:30.260036 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b42944c_2de0_47a6_bdb1_70750adb4c3c.slice/crio-9cefc9402cb1a1abb47a6259f0e729be29d59238b008464e3682a4f0010fc280 WatchSource:0}: Error finding container 9cefc9402cb1a1abb47a6259f0e729be29d59238b008464e3682a4f0010fc280: Status 404 returned error can't find the container with id 9cefc9402cb1a1abb47a6259f0e729be29d59238b008464e3682a4f0010fc280 Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.320105 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-ovsdbserver-nb\") pod \"74865fc2-5110-4d69-b769-b0c84c802759\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.320138 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-dns-swift-storage-0\") pod \"74865fc2-5110-4d69-b769-b0c84c802759\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.320203 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-config\") pod \"74865fc2-5110-4d69-b769-b0c84c802759\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.320295 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-ovsdbserver-sb\") pod \"74865fc2-5110-4d69-b769-b0c84c802759\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.320378 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d49jd\" (UniqueName: \"kubernetes.io/projected/74865fc2-5110-4d69-b769-b0c84c802759-kube-api-access-d49jd\") pod \"74865fc2-5110-4d69-b769-b0c84c802759\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.320437 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-dns-svc\") pod \"74865fc2-5110-4d69-b769-b0c84c802759\" (UID: \"74865fc2-5110-4d69-b769-b0c84c802759\") " Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.331495 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded 
for volume "kubernetes.io/projected/74865fc2-5110-4d69-b769-b0c84c802759-kube-api-access-d49jd" (OuterVolumeSpecName: "kube-api-access-d49jd") pod "74865fc2-5110-4d69-b769-b0c84c802759" (UID: "74865fc2-5110-4d69-b769-b0c84c802759"). InnerVolumeSpecName "kube-api-access-d49jd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.340021 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-746497c59d-h7fpp"] Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.386153 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "74865fc2-5110-4d69-b769-b0c84c802759" (UID: "74865fc2-5110-4d69-b769-b0c84c802759"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.387661 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "74865fc2-5110-4d69-b769-b0c84c802759" (UID: "74865fc2-5110-4d69-b769-b0c84c802759"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.404209 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6c9b968d8f-c6bhw"] Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.409161 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-config" (OuterVolumeSpecName: "config") pod "74865fc2-5110-4d69-b769-b0c84c802759" (UID: "74865fc2-5110-4d69-b769-b0c84c802759"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.423482 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.423506 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d49jd\" (UniqueName: \"kubernetes.io/projected/74865fc2-5110-4d69-b769-b0c84c802759-kube-api-access-d49jd\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.423515 4684 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.423523 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.432203 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "74865fc2-5110-4d69-b769-b0c84c802759" (UID: "74865fc2-5110-4d69-b769-b0c84c802759"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.452494 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "74865fc2-5110-4d69-b769-b0c84c802759" (UID: "74865fc2-5110-4d69-b769-b0c84c802759"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.525009 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.525049 4684 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/74865fc2-5110-4d69-b769-b0c84c802759-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.537764 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5854cb58cb-4hhrx"] Oct 13 13:23:30 crc kubenswrapper[4684]: W1013 13:23:30.543611 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8cfa6a6_f09c_4903_a8ce_df37542f7fd2.slice/crio-6380bf9e33c3f06ad01e76063a230ab2278ffe394c4ff5ebfb4a0bd5170c2583 WatchSource:0}: Error finding container 6380bf9e33c3f06ad01e76063a230ab2278ffe394c4ff5ebfb4a0bd5170c2583: Status 404 returned error can't find the container with id 6380bf9e33c3f06ad01e76063a230ab2278ffe394c4ff5ebfb4a0bd5170c2583 Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.654091 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-58b68df49d-vntzm"] Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.678815 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b5dbf95fc-22v5n"] Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.833347 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5c87fbbfdb-hnmd8" event={"ID":"6b42944c-2de0-47a6-bdb1-70750adb4c3c","Type":"ContainerStarted","Data":"1a493eaccafc770b6f6a84bb69cc1cf7acec06e302f2da5d6f72283f59e99663"} Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.833653 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5c87fbbfdb-hnmd8" event={"ID":"6b42944c-2de0-47a6-bdb1-70750adb4c3c","Type":"ContainerStarted","Data":"9cefc9402cb1a1abb47a6259f0e729be29d59238b008464e3682a4f0010fc280"} Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.833809 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.835290 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74847f7b97-m7fd7" event={"ID":"74865fc2-5110-4d69-b769-b0c84c802759","Type":"ContainerDied","Data":"06ac7a6d9a1c490e2aa9ddf23d2773cb0e2a686c3dcab4216d2d8327107f9da0"} Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.835331 4684 scope.go:117] "RemoveContainer" containerID="cbba4fc2a567d014886b43b4ee5acef1da442aa8c084cfcf09440a8a2f3eb9ff" Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.835475 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74847f7b97-m7fd7"
Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.846218 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5854cb58cb-4hhrx" event={"ID":"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2","Type":"ContainerStarted","Data":"800357549bd282283e5f30b7728b83a94031eacad1c52b2a100790e1d563fc3e"}
Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.846286 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5854cb58cb-4hhrx" event={"ID":"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2","Type":"ContainerStarted","Data":"6380bf9e33c3f06ad01e76063a230ab2278ffe394c4ff5ebfb4a0bd5170c2583"}
Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.848070 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6c9b968d8f-c6bhw" event={"ID":"290ff522-789b-4ba3-90d4-2047bf14a6de","Type":"ContainerStarted","Data":"668d827f0742c8afcee7604e49e519bfb17388df583a3ffcea3f417bb3b77a04"}
Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.849421 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-58b68df49d-vntzm" event={"ID":"3675d011-f8eb-4e9a-9000-85e976ed2ee7","Type":"ContainerStarted","Data":"74c394c0acaf4fb0ce2f4f7526c38caa5c9aed3127066238d6ea98a396003d07"}
Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.857391 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" event={"ID":"a6ba537f-3303-4324-b704-22ca556a0f30","Type":"ContainerStarted","Data":"8411ebb30eccdd7bc0c727172f691e60e9a1a1cf506c3c095e2b3ce5df3d42ca"}
Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.864775 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" event={"ID":"b3b2549e-a0de-4650-95fb-c3b8c8998664","Type":"ContainerStarted","Data":"6e55838ca5312de040f291a03c0950014d93145ebee67e55fa0993b1db9195a8"}
Oct 13 13:23:30 crc kubenswrapper[4684]: I1013 13:23:30.871406 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-5c87fbbfdb-hnmd8" podStartSLOduration=1.8713864409999998 podStartE2EDuration="1.871386441s" podCreationTimestamp="2025-10-13 13:23:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:30.86757987 +0000 UTC m=+965.434963960" watchObservedRunningTime="2025-10-13 13:23:30.871386441 +0000 UTC m=+965.438770531"
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.032082 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74847f7b97-m7fd7"]
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.038038 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74847f7b97-m7fd7"]
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.053488 4684 scope.go:117] "RemoveContainer" containerID="87134103cd23a720ea011361a3a0fbb826bbc5c1934cb01395c373fb63c27139"
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.809455 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.809502 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.844510 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.854446 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.878678 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-58b68df49d-vntzm" event={"ID":"3675d011-f8eb-4e9a-9000-85e976ed2ee7","Type":"ContainerStarted","Data":"d966cc581389a2bc40d8e6cc7843510c6a9327b86481eb8fd0454f654a75eb9a"}
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.878718 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-58b68df49d-vntzm" event={"ID":"3675d011-f8eb-4e9a-9000-85e976ed2ee7","Type":"ContainerStarted","Data":"60df7570ce1d906ed17ebe4800473f8a461210e8ed9c4ddc15edd27872dabc26"}
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.879562 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-58b68df49d-vntzm"
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.879591 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-58b68df49d-vntzm"
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.881933 4684 generic.go:334] "Generic (PLEG): container finished" podID="a6ba537f-3303-4324-b704-22ca556a0f30" containerID="6eb24f1be35ec2c38fda6c16961c19cd6e9bd43a8af574380b5fb9550412fac4" exitCode=0
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.881985 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" event={"ID":"a6ba537f-3303-4324-b704-22ca556a0f30","Type":"ContainerDied","Data":"6eb24f1be35ec2c38fda6c16961c19cd6e9bd43a8af574380b5fb9550412fac4"}
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.892958 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5854cb58cb-4hhrx" event={"ID":"a8cfa6a6-f09c-4903-a8ce-df37542f7fd2","Type":"ContainerStarted","Data":"207670effd9526c47172ba70a5b90157da9393e433f47a9d4a0904ff64f15da0"}
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.893610 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.893643 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5854cb58cb-4hhrx"
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.893675 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5854cb58cb-4hhrx"
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.893685 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.893082 4684 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.894061 4684 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.932307 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-58b68df49d-vntzm" podStartSLOduration=2.932290803 podStartE2EDuration="2.932290803s" podCreationTimestamp="2025-10-13 13:23:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:31.923127191 +0000 UTC m=+966.490511271" watchObservedRunningTime="2025-10-13 13:23:31.932290803 +0000 UTC m=+966.499674873"
Oct 13 13:23:31 crc kubenswrapper[4684]: I1013 13:23:31.947633 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5854cb58cb-4hhrx" podStartSLOduration=2.947614152 podStartE2EDuration="2.947614152s" podCreationTimestamp="2025-10-13 13:23:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:31.946927341 +0000 UTC m=+966.514311411" watchObservedRunningTime="2025-10-13 13:23:31.947614152 +0000 UTC m=+966.514998212"
Oct 13 13:23:32 crc kubenswrapper[4684]: I1013 13:23:32.005677 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Oct 13 13:23:32 crc kubenswrapper[4684]: I1013 13:23:32.009494 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Oct 13 13:23:32 crc kubenswrapper[4684]: I1013 13:23:32.371367 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74865fc2-5110-4d69-b769-b0c84c802759" path="/var/lib/kubelet/pods/74865fc2-5110-4d69-b769-b0c84c802759/volumes"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.033263 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6c8dbf9d98-ml255"]
Oct 13 13:23:33 crc kubenswrapper[4684]: E1013 13:23:33.033715 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74865fc2-5110-4d69-b769-b0c84c802759" containerName="dnsmasq-dns"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.033733 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="74865fc2-5110-4d69-b769-b0c84c802759" containerName="dnsmasq-dns"
Oct 13 13:23:33 crc kubenswrapper[4684]: E1013 13:23:33.033754 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74865fc2-5110-4d69-b769-b0c84c802759" containerName="init"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.033762 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="74865fc2-5110-4d69-b769-b0c84c802759" containerName="init"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.034048 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="74865fc2-5110-4d69-b769-b0c84c802759" containerName="dnsmasq-dns"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.035321 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.037310 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.037471 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.050441 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6c8dbf9d98-ml255"]
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.177567 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-public-tls-certs\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.177656 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-config-data\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.177732 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-internal-tls-certs\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.177765 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-logs\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.177812 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-config-data-custom\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.177887 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pp7g9\" (UniqueName: \"kubernetes.io/projected/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-kube-api-access-pp7g9\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.177982 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-combined-ca-bundle\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.281546 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-public-tls-certs\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.281616 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-config-data\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.281668 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-internal-tls-certs\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.281686 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-logs\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.281717 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-config-data-custom\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.281737 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pp7g9\" (UniqueName: \"kubernetes.io/projected/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-kube-api-access-pp7g9\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.281761 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-combined-ca-bundle\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.282249 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-logs\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.286526 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-public-tls-certs\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.287046 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-internal-tls-certs\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.288122 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-combined-ca-bundle\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.288440 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-config-data-custom\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.288969 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-config-data\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.298775 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pp7g9\" (UniqueName: \"kubernetes.io/projected/cafbe5b0-5ce7-4f2b-ac20-4f95592dc662-kube-api-access-pp7g9\") pod \"barbican-api-6c8dbf9d98-ml255\" (UID: \"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662\") " pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.380366 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.883234 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6c8dbf9d98-ml255"]
Oct 13 13:23:33 crc kubenswrapper[4684]: W1013 13:23:33.890710 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcafbe5b0_5ce7_4f2b_ac20_4f95592dc662.slice/crio-9b9e16fec7226fcb3100464e47e7489155e78df4e0ba2e49e662abbb269d79ab WatchSource:0}: Error finding container 9b9e16fec7226fcb3100464e47e7489155e78df4e0ba2e49e662abbb269d79ab: Status 404 returned error can't find the container with id 9b9e16fec7226fcb3100464e47e7489155e78df4e0ba2e49e662abbb269d79ab
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.938743 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" event={"ID":"a6ba537f-3303-4324-b704-22ca556a0f30","Type":"ContainerStarted","Data":"039fdb9bd806b764efc9f130dc2b57f0d51788963880670cb44c25d3a7285915"}
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.938889 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.940954 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" event={"ID":"b3b2549e-a0de-4650-95fb-c3b8c8998664","Type":"ContainerStarted","Data":"f8e6d1f72ca8a7193a42a47f02b7f7da7ec1428c346f250abbbcdc301a7fd5c5"}
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.941006 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" event={"ID":"b3b2549e-a0de-4650-95fb-c3b8c8998664","Type":"ContainerStarted","Data":"fc5127ac1dac3f74c2a5da0cb8717872e24720bd2b4bd6477e2f18abe6d51a0d"}
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.945223 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6c9b968d8f-c6bhw" event={"ID":"290ff522-789b-4ba3-90d4-2047bf14a6de","Type":"ContainerStarted","Data":"5c961da4096bbd596dcf9e5bd46688987e6e7ab438a7f17a57c2db20031853c1"}
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.945280 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6c9b968d8f-c6bhw" event={"ID":"290ff522-789b-4ba3-90d4-2047bf14a6de","Type":"ContainerStarted","Data":"93521a3205ec8209b03bf7c8269b878a4c48554be87dda370fd91ab3424bc7bc"}
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.964811 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6c8dbf9d98-ml255" event={"ID":"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662","Type":"ContainerStarted","Data":"9b9e16fec7226fcb3100464e47e7489155e78df4e0ba2e49e662abbb269d79ab"}
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.967183 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" podStartSLOduration=4.967160524 podStartE2EDuration="4.967160524s" podCreationTimestamp="2025-10-13 13:23:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:33.959038645 +0000 UTC m=+968.526422735" watchObservedRunningTime="2025-10-13 13:23:33.967160524 +0000 UTC m=+968.534544594"
Oct 13 13:23:33 crc kubenswrapper[4684]: I1013 13:23:33.994483 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-746497c59d-h7fpp" podStartSLOduration=2.723202343 podStartE2EDuration="4.994467365s" podCreationTimestamp="2025-10-13 13:23:29 +0000 UTC" firstStartedPulling="2025-10-13 13:23:30.372158666 +0000 UTC m=+964.939542736" lastFinishedPulling="2025-10-13 13:23:32.643423688 +0000 UTC m=+967.210807758" observedRunningTime="2025-10-13 13:23:33.98520655 +0000 UTC m=+968.552590620" watchObservedRunningTime="2025-10-13 13:23:33.994467365 +0000 UTC m=+968.561851435"
Oct 13 13:23:34 crc kubenswrapper[4684]: I1013 13:23:34.008807 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6c9b968d8f-c6bhw" podStartSLOduration=2.770047407 podStartE2EDuration="5.008784692s" podCreationTimestamp="2025-10-13 13:23:29 +0000 UTC" firstStartedPulling="2025-10-13 13:23:30.40772286 +0000 UTC m=+964.975106930" lastFinishedPulling="2025-10-13 13:23:32.646460125 +0000 UTC m=+967.213844215" observedRunningTime="2025-10-13 13:23:34.001229261 +0000 UTC m=+968.568613351" watchObservedRunningTime="2025-10-13 13:23:34.008784692 +0000 UTC m=+968.576168762"
Oct 13 13:23:34 crc kubenswrapper[4684]: I1013 13:23:34.218218 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 13 13:23:34 crc kubenswrapper[4684]: I1013 13:23:34.218658 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 13 13:23:34 crc kubenswrapper[4684]: I1013 13:23:34.984665 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6c8dbf9d98-ml255" event={"ID":"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662","Type":"ContainerStarted","Data":"a3785470d1da984fbfee3f8e59725c03deda99a9aef3faf425f4acb249f7d01f"}
Oct 13 13:23:34 crc kubenswrapper[4684]: I1013 13:23:34.985095 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6c8dbf9d98-ml255" event={"ID":"cafbe5b0-5ce7-4f2b-ac20-4f95592dc662","Type":"ContainerStarted","Data":"7d9d35d43d38b9e8526dcabb5d1b93e515d7e09d1f25ae546fc533f6064b700f"}
Oct 13 13:23:35 crc kubenswrapper[4684]: I1013 13:23:35.029675 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6c8dbf9d98-ml255" podStartSLOduration=2.029650807 podStartE2EDuration="2.029650807s" podCreationTimestamp="2025-10-13 13:23:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:35.010380332 +0000 UTC m=+969.577764402" watchObservedRunningTime="2025-10-13 13:23:35.029650807 +0000 UTC m=+969.597034877"
Oct 13 13:23:35 crc kubenswrapper[4684]: I1013 13:23:35.993186 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:35 crc kubenswrapper[4684]: I1013 13:23:35.993527 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6c8dbf9d98-ml255"
Oct 13 13:23:36 crc kubenswrapper[4684]: I1013 13:23:36.724468 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-58b68df49d-vntzm"
Oct 13 13:23:37 crc kubenswrapper[4684]: I1013 13:23:37.002833 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qprxl" event={"ID":"5f3242f8-a0ba-4799-bd79-a0523603fb37","Type":"ContainerStarted","Data":"ab3d5694d17c09d8f2464b90164a56fddc9fea25a825a0065c1309113c0a918d"}
Oct 13 13:23:38 crc kubenswrapper[4684]: I1013 13:23:38.138201 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-58b68df49d-vntzm"
Oct 13 13:23:38 crc kubenswrapper[4684]: I1013 13:23:38.171944 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-qprxl" podStartSLOduration=4.690183237 podStartE2EDuration="44.171915794s" podCreationTimestamp="2025-10-13 13:22:54 +0000 UTC" firstStartedPulling="2025-10-13 13:22:56.020856276 +0000 UTC m=+930.588240346" lastFinishedPulling="2025-10-13 13:23:35.502588833 +0000 UTC m=+970.069972903" observedRunningTime="2025-10-13 13:23:37.028255431 +0000 UTC m=+971.595639501" watchObservedRunningTime="2025-10-13 13:23:38.171915794 +0000 UTC m=+972.739299874"
Oct 13 13:23:40 crc kubenswrapper[4684]: I1013 13:23:40.000154 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n"
Oct 13 13:23:40 crc kubenswrapper[4684]: I1013 13:23:40.072239 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bb74c5949-vwrg8"]
Oct 13 13:23:40 crc kubenswrapper[4684]: I1013 13:23:40.072726 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" podUID="87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b" containerName="dnsmasq-dns" containerID="cri-o://69678d1c25a7e21f8963cde39a381700339f394afc1d77ea7f6f8b9e35ebe63e" gracePeriod=10
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.085111 4684 generic.go:334] "Generic (PLEG): container finished" podID="87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b" containerID="69678d1c25a7e21f8963cde39a381700339f394afc1d77ea7f6f8b9e35ebe63e" exitCode=0
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.085250 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" event={"ID":"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b","Type":"ContainerDied","Data":"69678d1c25a7e21f8963cde39a381700339f394afc1d77ea7f6f8b9e35ebe63e"}
Oct 13 13:23:41 crc kubenswrapper[4684]: E1013 13:23:41.199627 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="91117a69-e1f3-4e2d-9973-8c6f758962c2"
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.263608 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bb74c5949-vwrg8"
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.363846 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzjll\" (UniqueName: \"kubernetes.io/projected/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-kube-api-access-hzjll\") pod \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") "
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.363882 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-ovsdbserver-nb\") pod \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") "
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.364055 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-dns-swift-storage-0\") pod \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") "
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.364104 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-ovsdbserver-sb\") pod \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") "
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.364134 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-config\") pod \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") "
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.364185 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-dns-svc\") pod \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\" (UID: \"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b\") "
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.369394 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-kube-api-access-hzjll" (OuterVolumeSpecName: "kube-api-access-hzjll") pod "87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b" (UID: "87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b"). InnerVolumeSpecName "kube-api-access-hzjll". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.408819 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b" (UID: "87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.420823 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-config" (OuterVolumeSpecName: "config") pod "87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b" (UID: "87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.425661 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b" (UID: "87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.427787 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b" (UID: "87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.437358 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b" (UID: "87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.465985 4684 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.466033 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.466042 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-config\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.466052 4684 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.466061 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzjll\" (UniqueName: \"kubernetes.io/projected/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-kube-api-access-hzjll\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:41 crc kubenswrapper[4684]: I1013 13:23:41.466070 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:42 crc kubenswrapper[4684]: I1013 13:23:42.108047 4684 generic.go:334] "Generic (PLEG): container finished" podID="5f3242f8-a0ba-4799-bd79-a0523603fb37" containerID="ab3d5694d17c09d8f2464b90164a56fddc9fea25a825a0065c1309113c0a918d" exitCode=0
Oct 13 13:23:42 crc kubenswrapper[4684]: I1013 13:23:42.108845 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qprxl" event={"ID":"5f3242f8-a0ba-4799-bd79-a0523603fb37","Type":"ContainerDied","Data":"ab3d5694d17c09d8f2464b90164a56fddc9fea25a825a0065c1309113c0a918d"}
Oct 13 13:23:42 crc kubenswrapper[4684]: I1013 13:23:42.114629 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bb74c5949-vwrg8" event={"ID":"87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b","Type":"ContainerDied","Data":"bf6f9113d3a4421208dcb5edb5053215361dcb4caa3808d57b25c66826c8f12b"}
Oct 13 13:23:42 crc kubenswrapper[4684]: I1013 13:23:42.114727 4684 scope.go:117] "RemoveContainer" containerID="69678d1c25a7e21f8963cde39a381700339f394afc1d77ea7f6f8b9e35ebe63e"
Oct 13 13:23:42 crc kubenswrapper[4684]: I1013 13:23:42.114757 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bb74c5949-vwrg8"
Oct 13 13:23:42 crc kubenswrapper[4684]: I1013 13:23:42.119879 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91117a69-e1f3-4e2d-9973-8c6f758962c2","Type":"ContainerStarted","Data":"a3fc47451ec2e2c36115bef2bc92d3b3b8ffc63dedf2fc8c1b4388ff22785039"}
Oct 13 13:23:42 crc kubenswrapper[4684]: I1013 13:23:42.120086 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="91117a69-e1f3-4e2d-9973-8c6f758962c2" containerName="ceilometer-notification-agent" containerID="cri-o://7e2eecca1b38085002d5b6e5c133f0fdd51295f4948a21b5d40b8c73e1a02ec1" gracePeriod=30
Oct 13 13:23:42 crc kubenswrapper[4684]: I1013 13:23:42.120190 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 13 13:23:42 crc kubenswrapper[4684]: I1013 13:23:42.120219 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="91117a69-e1f3-4e2d-9973-8c6f758962c2" containerName="sg-core" containerID="cri-o://d4bd186f939643da0e99cbfd16ff7fd4629712142ca109785472dfd69109958d" gracePeriod=30
Oct 13 13:23:42 crc kubenswrapper[4684]: I1013 13:23:42.120286 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="91117a69-e1f3-4e2d-9973-8c6f758962c2" containerName="proxy-httpd" containerID="cri-o://a3fc47451ec2e2c36115bef2bc92d3b3b8ffc63dedf2fc8c1b4388ff22785039" gracePeriod=30
Oct 13 13:23:42 crc kubenswrapper[4684]: I1013 13:23:42.162551 4684 scope.go:117] "RemoveContainer" containerID="dee2a9a4f94d3c8bbb8c86de65cd7fb0441ce8e7a1aeaec809ed60f3d0a167ee"
Oct 13 13:23:42 crc kubenswrapper[4684]: I1013 13:23:42.197652 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bb74c5949-vwrg8"]
Oct 13 13:23:42 crc kubenswrapper[4684]: I1013 13:23:42.207482 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bb74c5949-vwrg8"]
Oct 13 13:23:42 crc kubenswrapper[4684]: I1013 13:23:42.360805 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b" path="/var/lib/kubelet/pods/87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b/volumes"
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.137788 4684 generic.go:334] "Generic (PLEG): container finished" podID="91117a69-e1f3-4e2d-9973-8c6f758962c2" containerID="a3fc47451ec2e2c36115bef2bc92d3b3b8ffc63dedf2fc8c1b4388ff22785039" exitCode=0
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.138257 4684 generic.go:334] "Generic (PLEG): container finished" podID="91117a69-e1f3-4e2d-9973-8c6f758962c2" containerID="d4bd186f939643da0e99cbfd16ff7fd4629712142ca109785472dfd69109958d" exitCode=2
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.137882 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91117a69-e1f3-4e2d-9973-8c6f758962c2","Type":"ContainerDied","Data":"a3fc47451ec2e2c36115bef2bc92d3b3b8ffc63dedf2fc8c1b4388ff22785039"}
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.138369 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91117a69-e1f3-4e2d-9973-8c6f758962c2","Type":"ContainerDied","Data":"d4bd186f939643da0e99cbfd16ff7fd4629712142ca109785472dfd69109958d"}
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.523175 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-qprxl"
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.606376 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-scripts\") pod \"5f3242f8-a0ba-4799-bd79-a0523603fb37\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") "
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.606449 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcblg\" (UniqueName: \"kubernetes.io/projected/5f3242f8-a0ba-4799-bd79-a0523603fb37-kube-api-access-dcblg\") pod \"5f3242f8-a0ba-4799-bd79-a0523603fb37\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") "
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.606522 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5f3242f8-a0ba-4799-bd79-a0523603fb37-etc-machine-id\") pod \"5f3242f8-a0ba-4799-bd79-a0523603fb37\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") "
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.606575 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-db-sync-config-data\") pod \"5f3242f8-a0ba-4799-bd79-a0523603fb37\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") "
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.606613 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-combined-ca-bundle\") pod \"5f3242f8-a0ba-4799-bd79-a0523603fb37\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") "
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.606665 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-config-data\") pod \"5f3242f8-a0ba-4799-bd79-a0523603fb37\" (UID: \"5f3242f8-a0ba-4799-bd79-a0523603fb37\") "
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.606656 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5f3242f8-a0ba-4799-bd79-a0523603fb37-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "5f3242f8-a0ba-4799-bd79-a0523603fb37" (UID: "5f3242f8-a0ba-4799-bd79-a0523603fb37"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.607046 4684 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5f3242f8-a0ba-4799-bd79-a0523603fb37-etc-machine-id\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.612765 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f3242f8-a0ba-4799-bd79-a0523603fb37-kube-api-access-dcblg" (OuterVolumeSpecName: "kube-api-access-dcblg") pod "5f3242f8-a0ba-4799-bd79-a0523603fb37" (UID: "5f3242f8-a0ba-4799-bd79-a0523603fb37"). InnerVolumeSpecName "kube-api-access-dcblg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.615435 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5f3242f8-a0ba-4799-bd79-a0523603fb37" (UID: "5f3242f8-a0ba-4799-bd79-a0523603fb37"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.615469 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-scripts" (OuterVolumeSpecName: "scripts") pod "5f3242f8-a0ba-4799-bd79-a0523603fb37" (UID: "5f3242f8-a0ba-4799-bd79-a0523603fb37"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.637505 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5f3242f8-a0ba-4799-bd79-a0523603fb37" (UID: "5f3242f8-a0ba-4799-bd79-a0523603fb37"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.655356 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-config-data" (OuterVolumeSpecName: "config-data") pod "5f3242f8-a0ba-4799-bd79-a0523603fb37" (UID: "5f3242f8-a0ba-4799-bd79-a0523603fb37"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.708489 4684 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.708849 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.708868 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.708886 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f3242f8-a0ba-4799-bd79-a0523603fb37-scripts\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:43 crc kubenswrapper[4684]: I1013 13:23:43.708924 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcblg\" (UniqueName: \"kubernetes.io/projected/5f3242f8-a0ba-4799-bd79-a0523603fb37-kube-api-access-dcblg\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.167337 4684 generic.go:334] "Generic (PLEG): container finished" podID="91117a69-e1f3-4e2d-9973-8c6f758962c2" containerID="7e2eecca1b38085002d5b6e5c133f0fdd51295f4948a21b5d40b8c73e1a02ec1" exitCode=0
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.167412 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91117a69-e1f3-4e2d-9973-8c6f758962c2","Type":"ContainerDied","Data":"7e2eecca1b38085002d5b6e5c133f0fdd51295f4948a21b5d40b8c73e1a02ec1"}
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.186321 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qprxl" event={"ID":"5f3242f8-a0ba-4799-bd79-a0523603fb37","Type":"ContainerDied","Data":"fd75ee02e043dfff7bbb1302a2565caec4e28859a4cf475ef17f2f7e642d5d3f"}
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.186387 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd75ee02e043dfff7bbb1302a2565caec4e28859a4cf475ef17f2f7e642d5d3f"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.186394 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-qprxl"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.255106 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.317761 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-sg-core-conf-yaml\") pod \"91117a69-e1f3-4e2d-9973-8c6f758962c2\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") "
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.317817 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9p2rc\" (UniqueName: \"kubernetes.io/projected/91117a69-e1f3-4e2d-9973-8c6f758962c2-kube-api-access-9p2rc\") pod \"91117a69-e1f3-4e2d-9973-8c6f758962c2\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") "
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.317878 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-combined-ca-bundle\") pod \"91117a69-e1f3-4e2d-9973-8c6f758962c2\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") "
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.317948 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91117a69-e1f3-4e2d-9973-8c6f758962c2-run-httpd\") pod \"91117a69-e1f3-4e2d-9973-8c6f758962c2\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") "
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.317973 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-config-data\") pod \"91117a69-e1f3-4e2d-9973-8c6f758962c2\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") "
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.317997 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91117a69-e1f3-4e2d-9973-8c6f758962c2-log-httpd\") pod \"91117a69-e1f3-4e2d-9973-8c6f758962c2\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") "
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.318055 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-scripts\") pod \"91117a69-e1f3-4e2d-9973-8c6f758962c2\" (UID: \"91117a69-e1f3-4e2d-9973-8c6f758962c2\") "
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.318705 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91117a69-e1f3-4e2d-9973-8c6f758962c2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "91117a69-e1f3-4e2d-9973-8c6f758962c2" (UID: "91117a69-e1f3-4e2d-9973-8c6f758962c2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.318691 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91117a69-e1f3-4e2d-9973-8c6f758962c2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "91117a69-e1f3-4e2d-9973-8c6f758962c2" (UID: "91117a69-e1f3-4e2d-9973-8c6f758962c2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.323156 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-scripts" (OuterVolumeSpecName: "scripts") pod "91117a69-e1f3-4e2d-9973-8c6f758962c2" (UID: "91117a69-e1f3-4e2d-9973-8c6f758962c2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.326023 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91117a69-e1f3-4e2d-9973-8c6f758962c2-kube-api-access-9p2rc" (OuterVolumeSpecName: "kube-api-access-9p2rc") pod "91117a69-e1f3-4e2d-9973-8c6f758962c2" (UID: "91117a69-e1f3-4e2d-9973-8c6f758962c2"). InnerVolumeSpecName "kube-api-access-9p2rc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.358477 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "91117a69-e1f3-4e2d-9973-8c6f758962c2" (UID: "91117a69-e1f3-4e2d-9973-8c6f758962c2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.386379 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "91117a69-e1f3-4e2d-9973-8c6f758962c2" (UID: "91117a69-e1f3-4e2d-9973-8c6f758962c2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.421236 4684 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.421577 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9p2rc\" (UniqueName: \"kubernetes.io/projected/91117a69-e1f3-4e2d-9973-8c6f758962c2-kube-api-access-9p2rc\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.421594 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.421607 4684 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91117a69-e1f3-4e2d-9973-8c6f758962c2-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.421618 4684 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91117a69-e1f3-4e2d-9973-8c6f758962c2-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.421627 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-scripts\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.448568 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-config-data" (OuterVolumeSpecName: "config-data") pod "91117a69-e1f3-4e2d-9973-8c6f758962c2" (UID: "91117a69-e1f3-4e2d-9973-8c6f758962c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.451484 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 13 13:23:44 crc kubenswrapper[4684]: E1013 13:23:44.451885 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f3242f8-a0ba-4799-bd79-a0523603fb37" containerName="cinder-db-sync"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.451916 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f3242f8-a0ba-4799-bd79-a0523603fb37" containerName="cinder-db-sync"
Oct 13 13:23:44 crc kubenswrapper[4684]: E1013 13:23:44.452171 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b" containerName="init"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.452183 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b" containerName="init"
Oct 13 13:23:44 crc kubenswrapper[4684]: E1013 13:23:44.452194 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91117a69-e1f3-4e2d-9973-8c6f758962c2" containerName="sg-core"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.452202 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="91117a69-e1f3-4e2d-9973-8c6f758962c2" containerName="sg-core"
Oct 13 13:23:44 crc kubenswrapper[4684]: E1013 13:23:44.452219 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91117a69-e1f3-4e2d-9973-8c6f758962c2" containerName="proxy-httpd"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.452226 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="91117a69-e1f3-4e2d-9973-8c6f758962c2" containerName="proxy-httpd"
Oct 13 13:23:44 crc kubenswrapper[4684]: E1013 13:23:44.452235 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b" containerName="dnsmasq-dns"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.452242 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b" containerName="dnsmasq-dns"
Oct 13 13:23:44 crc kubenswrapper[4684]: E1013 13:23:44.452254 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91117a69-e1f3-4e2d-9973-8c6f758962c2" containerName="ceilometer-notification-agent"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.452261 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="91117a69-e1f3-4e2d-9973-8c6f758962c2" containerName="ceilometer-notification-agent"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.452510 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="91117a69-e1f3-4e2d-9973-8c6f758962c2" containerName="proxy-httpd"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.452528 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="91117a69-e1f3-4e2d-9973-8c6f758962c2" containerName="ceilometer-notification-agent"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.452539 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="91117a69-e1f3-4e2d-9973-8c6f758962c2" containerName="sg-core"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.452550 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f3242f8-a0ba-4799-bd79-a0523603fb37" containerName="cinder-db-sync"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.452562 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="87ceea86-7d4b-4788-9b5a-cf7b55a8fd8b" containerName="dnsmasq-dns"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.453670 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.467161 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-zc47m"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.467440 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.467595 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.471607 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.484432 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.523095 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.523176 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-scripts\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.523241 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fb64p\" (UniqueName: \"kubernetes.io/projected/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-kube-api-access-fb64p\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.523303 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.523377 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.523414 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-config-data\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.523500 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91117a69-e1f3-4e2d-9973-8c6f758962c2-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.538256 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8dc864ccc-874nt"]
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.540393 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8dc864ccc-874nt"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.556259 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8dc864ccc-874nt"]
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.625057 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-dns-swift-storage-0\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.625147 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.625217 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.625251 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-config-data\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.625297 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k87tz\" (UniqueName: \"kubernetes.io/projected/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-kube-api-access-k87tz\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.625338 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-ovsdbserver-sb\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.625366 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-config\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.625403 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.625444 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-ovsdbserver-nb\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.625473 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-scripts\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.625499 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-dns-svc\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.625556 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fb64p\" (UniqueName: \"kubernetes.io/projected/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-kube-api-access-fb64p\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.625828 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.629106 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-scripts\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.629233 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.629531 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-config-data\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.641393 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.644242 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fb64p\" (UniqueName: \"kubernetes.io/projected/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-kube-api-access-fb64p\") pod \"cinder-scheduler-0\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " pod="openstack/cinder-scheduler-0"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.726746 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k87tz\" (UniqueName: \"kubernetes.io/projected/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-kube-api-access-k87tz\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.726812 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-ovsdbserver-sb\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.726837 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-config\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.726876 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-ovsdbserver-nb\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.726913 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-dns-svc\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.726958 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-dns-swift-storage-0\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.727980 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-dns-swift-storage-0\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt"
Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.728038 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-ovsdbserver-nb\") pod
\"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.728049 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-ovsdbserver-sb\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.728566 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-dns-svc\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.728587 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.728760 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-config\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.730358 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.735745 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.748637 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k87tz\" (UniqueName: \"kubernetes.io/projected/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-kube-api-access-k87tz\") pod \"dnsmasq-dns-8dc864ccc-874nt\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") " pod="openstack/dnsmasq-dns-8dc864ccc-874nt" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.752617 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.827997 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8mvg\" (UniqueName: \"kubernetes.io/projected/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-kube-api-access-k8mvg\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.828043 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.828068 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-logs\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.828255 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-config-data\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.828479 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-scripts\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.828531 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-etc-machine-id\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.828554 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-config-data-custom\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.862544 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.874077 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8dc864ccc-874nt" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.933825 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-scripts\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.933919 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-etc-machine-id\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.933955 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-config-data-custom\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.934027 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8mvg\" (UniqueName: \"kubernetes.io/projected/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-kube-api-access-k8mvg\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.934073 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.934108 4684 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-logs\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.934163 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-config-data\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.939299 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-etc-machine-id\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.940545 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-logs\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.945526 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-config-data\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.946353 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-scripts\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.946958 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.947187 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-config-data-custom\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:44 crc kubenswrapper[4684]: I1013 13:23:44.983570 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8mvg\" (UniqueName: \"kubernetes.io/projected/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-kube-api-access-k8mvg\") pod \"cinder-api-0\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " pod="openstack/cinder-api-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.049152 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.208513 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91117a69-e1f3-4e2d-9973-8c6f758962c2","Type":"ContainerDied","Data":"1f9a249b5d2f35e5e8f80939c4930ab2727810d363f56c853f87785f23671a0c"} Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.208559 4684 scope.go:117] "RemoveContainer" containerID="a3fc47451ec2e2c36115bef2bc92d3b3b8ffc63dedf2fc8c1b4388ff22785039" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.208609 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.251338 4684 scope.go:117] "RemoveContainer" containerID="d4bd186f939643da0e99cbfd16ff7fd4629712142ca109785472dfd69109958d" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.265544 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6c8dbf9d98-ml255" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.309014 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.323797 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.330492 4684 scope.go:117] "RemoveContainer" containerID="7e2eecca1b38085002d5b6e5c133f0fdd51295f4948a21b5d40b8c73e1a02ec1" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.337924 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.340682 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.343152 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.343336 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.353863 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:23:45 crc kubenswrapper[4684]: W1013 13:23:45.426881 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f4b5f89_ca90_419c_8fa2_96a6d4f8e36f.slice/crio-4e4b6158606f9eaa24346eca1b78e501c0674f7d60d318d91ef1a0b089b56ec9 WatchSource:0}: Error finding container 4e4b6158606f9eaa24346eca1b78e501c0674f7d60d318d91ef1a0b089b56ec9: Status 404 returned error can't find the container with id 4e4b6158606f9eaa24346eca1b78e501c0674f7d60d318d91ef1a0b089b56ec9 Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.431493 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.439219 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6c8dbf9d98-ml255" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.445145 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98l94\" (UniqueName: \"kubernetes.io/projected/72ac328b-e864-4a16-954e-52fc0e0972e5-kube-api-access-98l94\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.445231 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72ac328b-e864-4a16-954e-52fc0e0972e5-log-httpd\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.445251 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-scripts\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.445271 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.445300 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72ac328b-e864-4a16-954e-52fc0e0972e5-run-httpd\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.445323 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-config-data\") pod 
\"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.445356 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.506759 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-58b68df49d-vntzm"] Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.507966 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-58b68df49d-vntzm" podUID="3675d011-f8eb-4e9a-9000-85e976ed2ee7" containerName="barbican-api-log" containerID="cri-o://60df7570ce1d906ed17ebe4800473f8a461210e8ed9c4ddc15edd27872dabc26" gracePeriod=30 Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.508305 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-58b68df49d-vntzm" podUID="3675d011-f8eb-4e9a-9000-85e976ed2ee7" containerName="barbican-api" containerID="cri-o://d966cc581389a2bc40d8e6cc7843510c6a9327b86481eb8fd0454f654a75eb9a" gracePeriod=30 Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.538223 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8dc864ccc-874nt"] Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.546725 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72ac328b-e864-4a16-954e-52fc0e0972e5-log-httpd\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.546756 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-scripts\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.546778 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.546807 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72ac328b-e864-4a16-954e-52fc0e0972e5-run-httpd\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.546831 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-config-data\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.546863 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-sg-core-conf-yaml\") pod \"ceilometer-0\" 
(UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.546939 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98l94\" (UniqueName: \"kubernetes.io/projected/72ac328b-e864-4a16-954e-52fc0e0972e5-kube-api-access-98l94\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.551476 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72ac328b-e864-4a16-954e-52fc0e0972e5-log-httpd\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.552166 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.552393 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72ac328b-e864-4a16-954e-52fc0e0972e5-run-httpd\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.567727 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-scripts\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.569538 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.570472 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-config-data\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.581956 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98l94\" (UniqueName: \"kubernetes.io/projected/72ac328b-e864-4a16-954e-52fc0e0972e5-kube-api-access-98l94\") pod \"ceilometer-0\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " pod="openstack/ceilometer-0" Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.615211 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 13 13:23:45 crc kubenswrapper[4684]: I1013 13:23:45.667450 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:23:46 crc kubenswrapper[4684]: I1013 13:23:46.220923 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"60ea9822-2a7b-4b65-9267-6d6bca3b38ac","Type":"ContainerStarted","Data":"99c4a58aea03888e7a4d9c645a31225bba3c564c3fa3ef54866fb14f70840191"} Oct 13 13:23:46 crc kubenswrapper[4684]: I1013 13:23:46.225248 4684 generic.go:334] "Generic (PLEG): container finished" podID="1779fa84-a997-4fa9-8c51-9fbb2949b8f7" containerID="64a1cec764f6685404a40485ae4f94eba15dc9a86aef37c7e748e5a217a358a1" exitCode=0 Oct 13 13:23:46 crc kubenswrapper[4684]: I1013 13:23:46.225292 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8dc864ccc-874nt" event={"ID":"1779fa84-a997-4fa9-8c51-9fbb2949b8f7","Type":"ContainerDied","Data":"64a1cec764f6685404a40485ae4f94eba15dc9a86aef37c7e748e5a217a358a1"} Oct 13 13:23:46 crc kubenswrapper[4684]: I1013 13:23:46.225338 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8dc864ccc-874nt" event={"ID":"1779fa84-a997-4fa9-8c51-9fbb2949b8f7","Type":"ContainerStarted","Data":"37ac213fb8bf054f77585dfec11a8041a395e987b4ef630d64e690222319f6b8"} Oct 13 13:23:46 crc kubenswrapper[4684]: I1013 13:23:46.228481 4684 generic.go:334] "Generic (PLEG): container finished" podID="3675d011-f8eb-4e9a-9000-85e976ed2ee7" containerID="60df7570ce1d906ed17ebe4800473f8a461210e8ed9c4ddc15edd27872dabc26" exitCode=143 Oct 13 13:23:46 crc kubenswrapper[4684]: I1013 13:23:46.228849 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-58b68df49d-vntzm" event={"ID":"3675d011-f8eb-4e9a-9000-85e976ed2ee7","Type":"ContainerDied","Data":"60df7570ce1d906ed17ebe4800473f8a461210e8ed9c4ddc15edd27872dabc26"} Oct 13 13:23:46 crc kubenswrapper[4684]: I1013 13:23:46.236849 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f","Type":"ContainerStarted","Data":"4e4b6158606f9eaa24346eca1b78e501c0674f7d60d318d91ef1a0b089b56ec9"} Oct 13 13:23:46 crc kubenswrapper[4684]: I1013 13:23:46.340703 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:23:46 crc kubenswrapper[4684]: I1013 13:23:46.377841 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91117a69-e1f3-4e2d-9973-8c6f758962c2" path="/var/lib/kubelet/pods/91117a69-e1f3-4e2d-9973-8c6f758962c2/volumes" Oct 13 13:23:46 crc kubenswrapper[4684]: I1013 13:23:46.856394 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 13 13:23:47 crc kubenswrapper[4684]: I1013 13:23:47.247351 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"60ea9822-2a7b-4b65-9267-6d6bca3b38ac","Type":"ContainerStarted","Data":"4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4"} Oct 13 13:23:47 crc kubenswrapper[4684]: I1013 13:23:47.251116 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f","Type":"ContainerStarted","Data":"eac3bf14280a8d169bee838d8fed4f0f6cc48ca208630b9ca1bf195784d1fe8d"} Oct 13 13:23:47 crc kubenswrapper[4684]: I1013 13:23:47.253703 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"72ac328b-e864-4a16-954e-52fc0e0972e5","Type":"ContainerStarted","Data":"fee5531fd6f9aa0296eedd00fbd369fe6d94645d5638b62aef8b0bc6b67c3ae7"} Oct 13 13:23:47 crc kubenswrapper[4684]: I1013 13:23:47.253809 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72ac328b-e864-4a16-954e-52fc0e0972e5","Type":"ContainerStarted","Data":"1e1888132d74d5120b37f08ebdc58f4e8d41bd4dc6e40efd8bbd97300161fee0"} Oct 13 13:23:47 crc kubenswrapper[4684]: I1013 13:23:47.257772 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8dc864ccc-874nt" event={"ID":"1779fa84-a997-4fa9-8c51-9fbb2949b8f7","Type":"ContainerStarted","Data":"0a7eb6c19f82d32f681f7de74527cbaf99bbfbb03014333793c3d4c6cdd06918"} Oct 13 13:23:47 crc kubenswrapper[4684]: I1013 13:23:47.257918 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8dc864ccc-874nt" Oct 13 13:23:47 crc kubenswrapper[4684]: I1013 13:23:47.278761 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8dc864ccc-874nt" podStartSLOduration=3.278741986 podStartE2EDuration="3.278741986s" podCreationTimestamp="2025-10-13 13:23:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:47.272629321 +0000 UTC m=+981.840013391" watchObservedRunningTime="2025-10-13 13:23:47.278741986 +0000 UTC m=+981.846126056" Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.270192 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f","Type":"ContainerStarted","Data":"d17c2debf5ad71814d77f1b313e96c452f065d0750ee6b6deb255abf23a5e861"} Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.273602 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"60ea9822-2a7b-4b65-9267-6d6bca3b38ac","Type":"ContainerStarted","Data":"287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14"} Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.273752 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.273832 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="60ea9822-2a7b-4b65-9267-6d6bca3b38ac" containerName="cinder-api-log" containerID="cri-o://4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4" gracePeriod=30 Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.273846 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="60ea9822-2a7b-4b65-9267-6d6bca3b38ac" containerName="cinder-api" containerID="cri-o://287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14" gracePeriod=30 Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.288690 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72ac328b-e864-4a16-954e-52fc0e0972e5","Type":"ContainerStarted","Data":"d00f1d8dfbe3014f240aed3605e4b80c496c7b7d08a8255f7a51f3e8b87845a3"} Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.288733 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"72ac328b-e864-4a16-954e-52fc0e0972e5","Type":"ContainerStarted","Data":"c41a6faeddb50508f6b88725e895922257647635549bbc91beadcd5c799011ea"} Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.300461 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.866458254 podStartE2EDuration="4.300441368s" podCreationTimestamp="2025-10-13 13:23:44 +0000 UTC" firstStartedPulling="2025-10-13 13:23:45.428691061 +0000 UTC m=+979.996075131" lastFinishedPulling="2025-10-13 13:23:45.862674175 +0000 UTC m=+980.430058245" observedRunningTime="2025-10-13 13:23:48.300098427 +0000 UTC m=+982.867482507" watchObservedRunningTime="2025-10-13 13:23:48.300441368 +0000 UTC m=+982.867825428" Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.330135 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.330100934 podStartE2EDuration="4.330100934s" podCreationTimestamp="2025-10-13 13:23:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:48.324139415 +0000 UTC m=+982.891523505" watchObservedRunningTime="2025-10-13 13:23:48.330100934 +0000 UTC m=+982.897485014" Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.856107 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.923199 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-etc-machine-id\") pod \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.923283 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-config-data\") pod \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.923311 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-combined-ca-bundle\") pod \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.923323 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "60ea9822-2a7b-4b65-9267-6d6bca3b38ac" (UID: "60ea9822-2a7b-4b65-9267-6d6bca3b38ac"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.923333 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-config-data-custom\") pod \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.923445 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-logs\") pod \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.923550 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-scripts\") pod \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.923576 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8mvg\" (UniqueName: \"kubernetes.io/projected/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-kube-api-access-k8mvg\") pod \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\" (UID: \"60ea9822-2a7b-4b65-9267-6d6bca3b38ac\") " Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.924012 4684 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.924022 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-logs" (OuterVolumeSpecName: "logs") pod "60ea9822-2a7b-4b65-9267-6d6bca3b38ac" (UID: "60ea9822-2a7b-4b65-9267-6d6bca3b38ac"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.930482 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "60ea9822-2a7b-4b65-9267-6d6bca3b38ac" (UID: "60ea9822-2a7b-4b65-9267-6d6bca3b38ac"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.930534 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-scripts" (OuterVolumeSpecName: "scripts") pod "60ea9822-2a7b-4b65-9267-6d6bca3b38ac" (UID: "60ea9822-2a7b-4b65-9267-6d6bca3b38ac"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.953779 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "60ea9822-2a7b-4b65-9267-6d6bca3b38ac" (UID: "60ea9822-2a7b-4b65-9267-6d6bca3b38ac"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.955373 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-kube-api-access-k8mvg" (OuterVolumeSpecName: "kube-api-access-k8mvg") pod "60ea9822-2a7b-4b65-9267-6d6bca3b38ac" (UID: "60ea9822-2a7b-4b65-9267-6d6bca3b38ac"). InnerVolumeSpecName "kube-api-access-k8mvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:23:48 crc kubenswrapper[4684]: I1013 13:23:48.987937 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-config-data" (OuterVolumeSpecName: "config-data") pod "60ea9822-2a7b-4b65-9267-6d6bca3b38ac" (UID: "60ea9822-2a7b-4b65-9267-6d6bca3b38ac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.025519 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.025553 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8mvg\" (UniqueName: \"kubernetes.io/projected/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-kube-api-access-k8mvg\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.025564 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.025573 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.025582 4684 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.025592 4684 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60ea9822-2a7b-4b65-9267-6d6bca3b38ac-logs\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.035745 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.126889 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xn9df\" (UniqueName: \"kubernetes.io/projected/3675d011-f8eb-4e9a-9000-85e976ed2ee7-kube-api-access-xn9df\") pod \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.127483 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-config-data-custom\") pod \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.127678 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-combined-ca-bundle\") pod \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.128003 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3675d011-f8eb-4e9a-9000-85e976ed2ee7-logs\") pod \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.128158 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-config-data\") pod \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\" (UID: \"3675d011-f8eb-4e9a-9000-85e976ed2ee7\") " Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.129671 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3675d011-f8eb-4e9a-9000-85e976ed2ee7-logs" (OuterVolumeSpecName: "logs") pod "3675d011-f8eb-4e9a-9000-85e976ed2ee7" (UID: "3675d011-f8eb-4e9a-9000-85e976ed2ee7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.130244 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3675d011-f8eb-4e9a-9000-85e976ed2ee7-kube-api-access-xn9df" (OuterVolumeSpecName: "kube-api-access-xn9df") pod "3675d011-f8eb-4e9a-9000-85e976ed2ee7" (UID: "3675d011-f8eb-4e9a-9000-85e976ed2ee7"). InnerVolumeSpecName "kube-api-access-xn9df". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.130560 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3675d011-f8eb-4e9a-9000-85e976ed2ee7" (UID: "3675d011-f8eb-4e9a-9000-85e976ed2ee7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.155802 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3675d011-f8eb-4e9a-9000-85e976ed2ee7" (UID: "3675d011-f8eb-4e9a-9000-85e976ed2ee7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.181139 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-config-data" (OuterVolumeSpecName: "config-data") pod "3675d011-f8eb-4e9a-9000-85e976ed2ee7" (UID: "3675d011-f8eb-4e9a-9000-85e976ed2ee7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.230735 4684 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3675d011-f8eb-4e9a-9000-85e976ed2ee7-logs\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.230946 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.231029 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xn9df\" (UniqueName: \"kubernetes.io/projected/3675d011-f8eb-4e9a-9000-85e976ed2ee7-kube-api-access-xn9df\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.231095 4684 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.231149 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3675d011-f8eb-4e9a-9000-85e976ed2ee7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.310398 4684 generic.go:334] "Generic (PLEG): container finished" podID="3675d011-f8eb-4e9a-9000-85e976ed2ee7" containerID="d966cc581389a2bc40d8e6cc7843510c6a9327b86481eb8fd0454f654a75eb9a" exitCode=0 Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.310474 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-58b68df49d-vntzm" event={"ID":"3675d011-f8eb-4e9a-9000-85e976ed2ee7","Type":"ContainerDied","Data":"d966cc581389a2bc40d8e6cc7843510c6a9327b86481eb8fd0454f654a75eb9a"} Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.310502 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-58b68df49d-vntzm" event={"ID":"3675d011-f8eb-4e9a-9000-85e976ed2ee7","Type":"ContainerDied","Data":"74c394c0acaf4fb0ce2f4f7526c38caa5c9aed3127066238d6ea98a396003d07"} Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.310502 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-58b68df49d-vntzm" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.310523 4684 scope.go:117] "RemoveContainer" containerID="d966cc581389a2bc40d8e6cc7843510c6a9327b86481eb8fd0454f654a75eb9a" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.314867 4684 generic.go:334] "Generic (PLEG): container finished" podID="60ea9822-2a7b-4b65-9267-6d6bca3b38ac" containerID="287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14" exitCode=0 Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.314941 4684 generic.go:334] "Generic (PLEG): container finished" podID="60ea9822-2a7b-4b65-9267-6d6bca3b38ac" containerID="4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4" exitCode=143 Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.315491 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.316055 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"60ea9822-2a7b-4b65-9267-6d6bca3b38ac","Type":"ContainerDied","Data":"287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14"} Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.316089 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"60ea9822-2a7b-4b65-9267-6d6bca3b38ac","Type":"ContainerDied","Data":"4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4"} Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.316107 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"60ea9822-2a7b-4b65-9267-6d6bca3b38ac","Type":"ContainerDied","Data":"99c4a58aea03888e7a4d9c645a31225bba3c564c3fa3ef54866fb14f70840191"} Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.338073 4684 scope.go:117] "RemoveContainer" containerID="60df7570ce1d906ed17ebe4800473f8a461210e8ed9c4ddc15edd27872dabc26" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.360832 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-58b68df49d-vntzm"] Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.361068 4684 scope.go:117] "RemoveContainer" containerID="d966cc581389a2bc40d8e6cc7843510c6a9327b86481eb8fd0454f654a75eb9a" Oct 13 13:23:49 crc kubenswrapper[4684]: E1013 13:23:49.361561 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d966cc581389a2bc40d8e6cc7843510c6a9327b86481eb8fd0454f654a75eb9a\": container with ID starting with d966cc581389a2bc40d8e6cc7843510c6a9327b86481eb8fd0454f654a75eb9a not found: ID does not exist" containerID="d966cc581389a2bc40d8e6cc7843510c6a9327b86481eb8fd0454f654a75eb9a" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.361596 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d966cc581389a2bc40d8e6cc7843510c6a9327b86481eb8fd0454f654a75eb9a"} err="failed to get container status \"d966cc581389a2bc40d8e6cc7843510c6a9327b86481eb8fd0454f654a75eb9a\": rpc error: code = NotFound desc = could not find container \"d966cc581389a2bc40d8e6cc7843510c6a9327b86481eb8fd0454f654a75eb9a\": container with ID starting with d966cc581389a2bc40d8e6cc7843510c6a9327b86481eb8fd0454f654a75eb9a not found: ID does not exist" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.361618 4684 scope.go:117] "RemoveContainer" 
containerID="60df7570ce1d906ed17ebe4800473f8a461210e8ed9c4ddc15edd27872dabc26" Oct 13 13:23:49 crc kubenswrapper[4684]: E1013 13:23:49.361973 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60df7570ce1d906ed17ebe4800473f8a461210e8ed9c4ddc15edd27872dabc26\": container with ID starting with 60df7570ce1d906ed17ebe4800473f8a461210e8ed9c4ddc15edd27872dabc26 not found: ID does not exist" containerID="60df7570ce1d906ed17ebe4800473f8a461210e8ed9c4ddc15edd27872dabc26" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.362160 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60df7570ce1d906ed17ebe4800473f8a461210e8ed9c4ddc15edd27872dabc26"} err="failed to get container status \"60df7570ce1d906ed17ebe4800473f8a461210e8ed9c4ddc15edd27872dabc26\": rpc error: code = NotFound desc = could not find container \"60df7570ce1d906ed17ebe4800473f8a461210e8ed9c4ddc15edd27872dabc26\": container with ID starting with 60df7570ce1d906ed17ebe4800473f8a461210e8ed9c4ddc15edd27872dabc26 not found: ID does not exist" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.362305 4684 scope.go:117] "RemoveContainer" containerID="287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.372197 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-58b68df49d-vntzm"] Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.379997 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.388097 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.394304 4684 scope.go:117] "RemoveContainer" containerID="4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.401630 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 13 13:23:49 crc kubenswrapper[4684]: E1013 13:23:49.402083 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3675d011-f8eb-4e9a-9000-85e976ed2ee7" containerName="barbican-api" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.402104 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="3675d011-f8eb-4e9a-9000-85e976ed2ee7" containerName="barbican-api" Oct 13 13:23:49 crc kubenswrapper[4684]: E1013 13:23:49.402117 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60ea9822-2a7b-4b65-9267-6d6bca3b38ac" containerName="cinder-api" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.402124 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="60ea9822-2a7b-4b65-9267-6d6bca3b38ac" containerName="cinder-api" Oct 13 13:23:49 crc kubenswrapper[4684]: E1013 13:23:49.402133 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3675d011-f8eb-4e9a-9000-85e976ed2ee7" containerName="barbican-api-log" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.402139 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="3675d011-f8eb-4e9a-9000-85e976ed2ee7" containerName="barbican-api-log" Oct 13 13:23:49 crc kubenswrapper[4684]: E1013 13:23:49.402191 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60ea9822-2a7b-4b65-9267-6d6bca3b38ac" containerName="cinder-api-log" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.402199 4684 
state_mem.go:107] "Deleted CPUSet assignment" podUID="60ea9822-2a7b-4b65-9267-6d6bca3b38ac" containerName="cinder-api-log" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.402393 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="60ea9822-2a7b-4b65-9267-6d6bca3b38ac" containerName="cinder-api-log" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.402405 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="3675d011-f8eb-4e9a-9000-85e976ed2ee7" containerName="barbican-api-log" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.402418 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="60ea9822-2a7b-4b65-9267-6d6bca3b38ac" containerName="cinder-api" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.402432 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="3675d011-f8eb-4e9a-9000-85e976ed2ee7" containerName="barbican-api" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.403321 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.410540 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.410622 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.410540 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.419217 4684 scope.go:117] "RemoveContainer" containerID="287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14" Oct 13 13:23:49 crc kubenswrapper[4684]: E1013 13:23:49.419965 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14\": container with ID starting with 287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14 not found: ID does not exist" containerID="287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.420016 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14"} err="failed to get container status \"287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14\": rpc error: code = NotFound desc = could not find container \"287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14\": container with ID starting with 287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14 not found: ID does not exist" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.420043 4684 scope.go:117] "RemoveContainer" containerID="4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4" Oct 13 13:23:49 crc kubenswrapper[4684]: E1013 13:23:49.420314 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4\": container with ID starting with 4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4 not found: ID does not exist" containerID="4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.420345 4684 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4"} err="failed to get container status \"4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4\": rpc error: code = NotFound desc = could not find container \"4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4\": container with ID starting with 4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4 not found: ID does not exist" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.420382 4684 scope.go:117] "RemoveContainer" containerID="287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.420592 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14"} err="failed to get container status \"287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14\": rpc error: code = NotFound desc = could not find container \"287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14\": container with ID starting with 287980f530f69bb52a74acf5271fb314d8802d28e03ddce080f8c1a28e34eb14 not found: ID does not exist" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.420610 4684 scope.go:117] "RemoveContainer" containerID="4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.420799 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4"} err="failed to get container status \"4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4\": rpc error: code = NotFound desc = could not find container \"4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4\": container with ID starting with 4697ef3059c6ae9db5fc176662d5bfbcbe9716bed50bf39e4058f6119b23fec4 not found: ID does not exist" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.426800 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.439072 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-config-data-custom\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.439136 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-config-data\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.439152 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6v66g\" (UniqueName: \"kubernetes.io/projected/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-kube-api-access-6v66g\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.439183 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.439203 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.439262 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-scripts\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.439278 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.439304 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-logs\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.439357 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.540726 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-config-data-custom\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.540791 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-config-data\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.540821 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6v66g\" (UniqueName: \"kubernetes.io/projected/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-kube-api-access-6v66g\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.540867 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.540933 4684 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.540992 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-scripts\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.541018 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.541044 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-logs\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.541089 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.541220 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.542150 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-logs\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.546325 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-config-data\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.546465 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.547377 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-scripts\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.547432 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.547724 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.548629 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-config-data-custom\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.562402 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6v66g\" (UniqueName: \"kubernetes.io/projected/4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e-kube-api-access-6v66g\") pod \"cinder-api-0\" (UID: \"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e\") " pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.738722 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 13 13:23:49 crc kubenswrapper[4684]: I1013 13:23:49.864211 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 13 13:23:50 crc kubenswrapper[4684]: I1013 13:23:50.198596 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 13 13:23:50 crc kubenswrapper[4684]: W1013 13:23:50.207743 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4873cf0b_4aca_4d5a_a7d0_4cbc8ceb1b1e.slice/crio-980e4ada8de93409d0f50e855f71ca3130e7ec1be7019e4a0dde9f8c14c2e6e1 WatchSource:0}: Error finding container 980e4ada8de93409d0f50e855f71ca3130e7ec1be7019e4a0dde9f8c14c2e6e1: Status 404 returned error can't find the container with id 980e4ada8de93409d0f50e855f71ca3130e7ec1be7019e4a0dde9f8c14c2e6e1 Oct 13 13:23:50 crc kubenswrapper[4684]: I1013 13:23:50.324108 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e","Type":"ContainerStarted","Data":"980e4ada8de93409d0f50e855f71ca3130e7ec1be7019e4a0dde9f8c14c2e6e1"} Oct 13 13:23:50 crc kubenswrapper[4684]: I1013 13:23:50.329546 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72ac328b-e864-4a16-954e-52fc0e0972e5","Type":"ContainerStarted","Data":"f448da61456e3820c532c93022e553e5fdd5d29a59b840ff39f514334d7c821f"} Oct 13 13:23:50 crc kubenswrapper[4684]: I1013 13:23:50.330252 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 13 13:23:50 crc kubenswrapper[4684]: I1013 13:23:50.356320 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.126586763 podStartE2EDuration="5.356299269s" podCreationTimestamp="2025-10-13 13:23:45 +0000 UTC" firstStartedPulling="2025-10-13 13:23:46.372972133 +0000 UTC m=+980.940356193" lastFinishedPulling="2025-10-13 13:23:49.602684579 +0000 UTC m=+984.170068699" observedRunningTime="2025-10-13 13:23:50.346796266 +0000 
UTC m=+984.914180336" watchObservedRunningTime="2025-10-13 13:23:50.356299269 +0000 UTC m=+984.923683339" Oct 13 13:23:50 crc kubenswrapper[4684]: I1013 13:23:50.367734 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3675d011-f8eb-4e9a-9000-85e976ed2ee7" path="/var/lib/kubelet/pods/3675d011-f8eb-4e9a-9000-85e976ed2ee7/volumes" Oct 13 13:23:50 crc kubenswrapper[4684]: I1013 13:23:50.368385 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60ea9822-2a7b-4b65-9267-6d6bca3b38ac" path="/var/lib/kubelet/pods/60ea9822-2a7b-4b65-9267-6d6bca3b38ac/volumes" Oct 13 13:23:51 crc kubenswrapper[4684]: I1013 13:23:51.340442 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e","Type":"ContainerStarted","Data":"e05dbf4211a01042ae3c46a5495cb0c3cf38acea40a3cd4d0a43e557aaff8a99"} Oct 13 13:23:52 crc kubenswrapper[4684]: I1013 13:23:52.018636 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6b56fc677b-j59pp" Oct 13 13:23:52 crc kubenswrapper[4684]: I1013 13:23:52.364665 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 13 13:23:52 crc kubenswrapper[4684]: I1013 13:23:52.364975 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e","Type":"ContainerStarted","Data":"7d0fb34440d209a367d417c8f1a131a2dfe803c26ccfc650d0e5a5882ccfea9c"} Oct 13 13:23:52 crc kubenswrapper[4684]: I1013 13:23:52.379727 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.379707655 podStartE2EDuration="3.379707655s" podCreationTimestamp="2025-10-13 13:23:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:23:52.375091128 +0000 UTC m=+986.942475188" watchObservedRunningTime="2025-10-13 13:23:52.379707655 +0000 UTC m=+986.947091725" Oct 13 13:23:54 crc kubenswrapper[4684]: I1013 13:23:54.686323 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-594469df77-tzkgb" Oct 13 13:23:54 crc kubenswrapper[4684]: I1013 13:23:54.764157 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6b56fc677b-j59pp"] Oct 13 13:23:54 crc kubenswrapper[4684]: I1013 13:23:54.764579 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6b56fc677b-j59pp" podUID="02bfc416-ee41-4c39-90ff-7debeec43ce5" containerName="neutron-api" containerID="cri-o://2ae7c125aa1f81711b939cbbc2db105dada3d6a3ec0bdff28919cad1b29460ba" gracePeriod=30 Oct 13 13:23:54 crc kubenswrapper[4684]: I1013 13:23:54.764719 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6b56fc677b-j59pp" podUID="02bfc416-ee41-4c39-90ff-7debeec43ce5" containerName="neutron-httpd" containerID="cri-o://3a538c440199f1d6b939bee45f3df3d443fd3b6c29e047035086bbfa7bfbc86c" gracePeriod=30 Oct 13 13:23:54 crc kubenswrapper[4684]: I1013 13:23:54.886125 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8dc864ccc-874nt" Oct 13 13:23:54 crc kubenswrapper[4684]: I1013 13:23:54.939500 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b5dbf95fc-22v5n"] Oct 13 13:23:54 crc kubenswrapper[4684]: I1013 13:23:54.939726 
4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" podUID="a6ba537f-3303-4324-b704-22ca556a0f30" containerName="dnsmasq-dns" containerID="cri-o://039fdb9bd806b764efc9f130dc2b57f0d51788963880670cb44c25d3a7285915" gracePeriod=10 Oct 13 13:23:54 crc kubenswrapper[4684]: I1013 13:23:54.999271 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" podUID="a6ba537f-3303-4324-b704-22ca556a0f30" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.155:5353: connect: connection refused" Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.150610 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.194407 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.390191 4684 generic.go:334] "Generic (PLEG): container finished" podID="02bfc416-ee41-4c39-90ff-7debeec43ce5" containerID="3a538c440199f1d6b939bee45f3df3d443fd3b6c29e047035086bbfa7bfbc86c" exitCode=0 Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.390283 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b56fc677b-j59pp" event={"ID":"02bfc416-ee41-4c39-90ff-7debeec43ce5","Type":"ContainerDied","Data":"3a538c440199f1d6b939bee45f3df3d443fd3b6c29e047035086bbfa7bfbc86c"} Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.392172 4684 generic.go:334] "Generic (PLEG): container finished" podID="a6ba537f-3303-4324-b704-22ca556a0f30" containerID="039fdb9bd806b764efc9f130dc2b57f0d51788963880670cb44c25d3a7285915" exitCode=0 Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.392244 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" event={"ID":"a6ba537f-3303-4324-b704-22ca556a0f30","Type":"ContainerDied","Data":"039fdb9bd806b764efc9f130dc2b57f0d51788963880670cb44c25d3a7285915"} Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.392880 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" containerName="cinder-scheduler" containerID="cri-o://eac3bf14280a8d169bee838d8fed4f0f6cc48ca208630b9ca1bf195784d1fe8d" gracePeriod=30 Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.392945 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" containerName="probe" containerID="cri-o://d17c2debf5ad71814d77f1b313e96c452f065d0750ee6b6deb255abf23a5e861" gracePeriod=30 Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.486059 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.595879 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gzgr\" (UniqueName: \"kubernetes.io/projected/a6ba537f-3303-4324-b704-22ca556a0f30-kube-api-access-6gzgr\") pod \"a6ba537f-3303-4324-b704-22ca556a0f30\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.596406 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-dns-svc\") pod \"a6ba537f-3303-4324-b704-22ca556a0f30\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.596451 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-dns-swift-storage-0\") pod \"a6ba537f-3303-4324-b704-22ca556a0f30\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.596502 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-ovsdbserver-nb\") pod \"a6ba537f-3303-4324-b704-22ca556a0f30\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.596584 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-ovsdbserver-sb\") pod \"a6ba537f-3303-4324-b704-22ca556a0f30\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.596613 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-config\") pod \"a6ba537f-3303-4324-b704-22ca556a0f30\" (UID: \"a6ba537f-3303-4324-b704-22ca556a0f30\") " Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.615169 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6ba537f-3303-4324-b704-22ca556a0f30-kube-api-access-6gzgr" (OuterVolumeSpecName: "kube-api-access-6gzgr") pod "a6ba537f-3303-4324-b704-22ca556a0f30" (UID: "a6ba537f-3303-4324-b704-22ca556a0f30"). InnerVolumeSpecName "kube-api-access-6gzgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.654621 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a6ba537f-3303-4324-b704-22ca556a0f30" (UID: "a6ba537f-3303-4324-b704-22ca556a0f30"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.663117 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-config" (OuterVolumeSpecName: "config") pod "a6ba537f-3303-4324-b704-22ca556a0f30" (UID: "a6ba537f-3303-4324-b704-22ca556a0f30"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.665027 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a6ba537f-3303-4324-b704-22ca556a0f30" (UID: "a6ba537f-3303-4324-b704-22ca556a0f30"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.668428 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a6ba537f-3303-4324-b704-22ca556a0f30" (UID: "a6ba537f-3303-4324-b704-22ca556a0f30"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.673643 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a6ba537f-3303-4324-b704-22ca556a0f30" (UID: "a6ba537f-3303-4324-b704-22ca556a0f30"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.698596 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gzgr\" (UniqueName: \"kubernetes.io/projected/a6ba537f-3303-4324-b704-22ca556a0f30-kube-api-access-6gzgr\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.699489 4684 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.699553 4684 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.699651 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.699711 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:55 crc kubenswrapper[4684]: I1013 13:23:55.699777 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6ba537f-3303-4324-b704-22ca556a0f30-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:56 crc kubenswrapper[4684]: I1013 13:23:56.403983 4684 generic.go:334] "Generic (PLEG): container finished" podID="7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" containerID="d17c2debf5ad71814d77f1b313e96c452f065d0750ee6b6deb255abf23a5e861" exitCode=0 Oct 13 13:23:56 crc kubenswrapper[4684]: I1013 13:23:56.404035 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f","Type":"ContainerDied","Data":"d17c2debf5ad71814d77f1b313e96c452f065d0750ee6b6deb255abf23a5e861"} Oct 13 13:23:56 
crc kubenswrapper[4684]: I1013 13:23:56.407162 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" event={"ID":"a6ba537f-3303-4324-b704-22ca556a0f30","Type":"ContainerDied","Data":"8411ebb30eccdd7bc0c727172f691e60e9a1a1cf506c3c095e2b3ce5df3d42ca"} Oct 13 13:23:56 crc kubenswrapper[4684]: I1013 13:23:56.407227 4684 scope.go:117] "RemoveContainer" containerID="039fdb9bd806b764efc9f130dc2b57f0d51788963880670cb44c25d3a7285915" Oct 13 13:23:56 crc kubenswrapper[4684]: I1013 13:23:56.407399 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b5dbf95fc-22v5n" Oct 13 13:23:56 crc kubenswrapper[4684]: I1013 13:23:56.437616 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b5dbf95fc-22v5n"] Oct 13 13:23:56 crc kubenswrapper[4684]: I1013 13:23:56.438361 4684 scope.go:117] "RemoveContainer" containerID="6eb24f1be35ec2c38fda6c16961c19cd6e9bd43a8af574380b5fb9550412fac4" Oct 13 13:23:56 crc kubenswrapper[4684]: I1013 13:23:56.446700 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b5dbf95fc-22v5n"] Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.339159 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.362961 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6ba537f-3303-4324-b704-22ca556a0f30" path="/var/lib/kubelet/pods/a6ba537f-3303-4324-b704-22ca556a0f30/volumes" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.440362 4684 generic.go:334] "Generic (PLEG): container finished" podID="02bfc416-ee41-4c39-90ff-7debeec43ce5" containerID="2ae7c125aa1f81711b939cbbc2db105dada3d6a3ec0bdff28919cad1b29460ba" exitCode=0 Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.440427 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b56fc677b-j59pp" event={"ID":"02bfc416-ee41-4c39-90ff-7debeec43ce5","Type":"ContainerDied","Data":"2ae7c125aa1f81711b939cbbc2db105dada3d6a3ec0bdff28919cad1b29460ba"} Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.443386 4684 generic.go:334] "Generic (PLEG): container finished" podID="7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" containerID="eac3bf14280a8d169bee838d8fed4f0f6cc48ca208630b9ca1bf195784d1fe8d" exitCode=0 Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.443412 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f","Type":"ContainerDied","Data":"eac3bf14280a8d169bee838d8fed4f0f6cc48ca208630b9ca1bf195784d1fe8d"} Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.443436 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f","Type":"ContainerDied","Data":"4e4b6158606f9eaa24346eca1b78e501c0674f7d60d318d91ef1a0b089b56ec9"} Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.443455 4684 scope.go:117] "RemoveContainer" containerID="d17c2debf5ad71814d77f1b313e96c452f065d0750ee6b6deb255abf23a5e861" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.443595 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.451145 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-scripts\") pod \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.451265 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-config-data\") pod \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.451305 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fb64p\" (UniqueName: \"kubernetes.io/projected/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-kube-api-access-fb64p\") pod \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.451435 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-etc-machine-id\") pod \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.451463 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-combined-ca-bundle\") pod \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.451487 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-config-data-custom\") pod \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\" (UID: \"7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f\") " Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.453617 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" (UID: "7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.457734 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-scripts" (OuterVolumeSpecName: "scripts") pod "7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" (UID: "7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.458856 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" (UID: "7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.462711 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-kube-api-access-fb64p" (OuterVolumeSpecName: "kube-api-access-fb64p") pod "7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" (UID: "7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f"). InnerVolumeSpecName "kube-api-access-fb64p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.531142 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" (UID: "7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.553521 4684 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.553555 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.553565 4684 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.553575 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.553586 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fb64p\" (UniqueName: \"kubernetes.io/projected/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-kube-api-access-fb64p\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.556302 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-config-data" (OuterVolumeSpecName: "config-data") pod "7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" (UID: "7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.610526 4684 scope.go:117] "RemoveContainer" containerID="eac3bf14280a8d169bee838d8fed4f0f6cc48ca208630b9ca1bf195784d1fe8d" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.629616 4684 scope.go:117] "RemoveContainer" containerID="d17c2debf5ad71814d77f1b313e96c452f065d0750ee6b6deb255abf23a5e861" Oct 13 13:23:58 crc kubenswrapper[4684]: E1013 13:23:58.629952 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d17c2debf5ad71814d77f1b313e96c452f065d0750ee6b6deb255abf23a5e861\": container with ID starting with d17c2debf5ad71814d77f1b313e96c452f065d0750ee6b6deb255abf23a5e861 not found: ID does not exist" containerID="d17c2debf5ad71814d77f1b313e96c452f065d0750ee6b6deb255abf23a5e861" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.629979 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d17c2debf5ad71814d77f1b313e96c452f065d0750ee6b6deb255abf23a5e861"} err="failed to get container status \"d17c2debf5ad71814d77f1b313e96c452f065d0750ee6b6deb255abf23a5e861\": rpc error: code = NotFound desc = could not find container \"d17c2debf5ad71814d77f1b313e96c452f065d0750ee6b6deb255abf23a5e861\": container with ID starting with d17c2debf5ad71814d77f1b313e96c452f065d0750ee6b6deb255abf23a5e861 not found: ID does not exist" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.629999 4684 scope.go:117] "RemoveContainer" containerID="eac3bf14280a8d169bee838d8fed4f0f6cc48ca208630b9ca1bf195784d1fe8d" Oct 13 13:23:58 crc kubenswrapper[4684]: E1013 13:23:58.630209 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eac3bf14280a8d169bee838d8fed4f0f6cc48ca208630b9ca1bf195784d1fe8d\": container with ID starting with eac3bf14280a8d169bee838d8fed4f0f6cc48ca208630b9ca1bf195784d1fe8d not found: ID does not exist" containerID="eac3bf14280a8d169bee838d8fed4f0f6cc48ca208630b9ca1bf195784d1fe8d" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.630229 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eac3bf14280a8d169bee838d8fed4f0f6cc48ca208630b9ca1bf195784d1fe8d"} err="failed to get container status \"eac3bf14280a8d169bee838d8fed4f0f6cc48ca208630b9ca1bf195784d1fe8d\": rpc error: code = NotFound desc = could not find container \"eac3bf14280a8d169bee838d8fed4f0f6cc48ca208630b9ca1bf195784d1fe8d\": container with ID starting with eac3bf14280a8d169bee838d8fed4f0f6cc48ca208630b9ca1bf195784d1fe8d not found: ID does not exist" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.655629 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.680409 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6b56fc677b-j59pp" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.757090 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-combined-ca-bundle\") pod \"02bfc416-ee41-4c39-90ff-7debeec43ce5\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.757531 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpg2w\" (UniqueName: \"kubernetes.io/projected/02bfc416-ee41-4c39-90ff-7debeec43ce5-kube-api-access-gpg2w\") pod \"02bfc416-ee41-4c39-90ff-7debeec43ce5\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.757690 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-config\") pod \"02bfc416-ee41-4c39-90ff-7debeec43ce5\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.758035 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-ovndb-tls-certs\") pod \"02bfc416-ee41-4c39-90ff-7debeec43ce5\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.758157 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-httpd-config\") pod \"02bfc416-ee41-4c39-90ff-7debeec43ce5\" (UID: \"02bfc416-ee41-4c39-90ff-7debeec43ce5\") " Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.761269 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "02bfc416-ee41-4c39-90ff-7debeec43ce5" (UID: "02bfc416-ee41-4c39-90ff-7debeec43ce5"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.763424 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02bfc416-ee41-4c39-90ff-7debeec43ce5-kube-api-access-gpg2w" (OuterVolumeSpecName: "kube-api-access-gpg2w") pod "02bfc416-ee41-4c39-90ff-7debeec43ce5" (UID: "02bfc416-ee41-4c39-90ff-7debeec43ce5"). InnerVolumeSpecName "kube-api-access-gpg2w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.782703 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.795690 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.816763 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 13 13:23:58 crc kubenswrapper[4684]: E1013 13:23:58.817319 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" containerName="cinder-scheduler" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.817351 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" containerName="cinder-scheduler" Oct 13 13:23:58 crc kubenswrapper[4684]: E1013 13:23:58.817372 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6ba537f-3303-4324-b704-22ca556a0f30" containerName="dnsmasq-dns" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.817383 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6ba537f-3303-4324-b704-22ca556a0f30" containerName="dnsmasq-dns" Oct 13 13:23:58 crc kubenswrapper[4684]: E1013 13:23:58.817420 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6ba537f-3303-4324-b704-22ca556a0f30" containerName="init" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.817432 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6ba537f-3303-4324-b704-22ca556a0f30" containerName="init" Oct 13 13:23:58 crc kubenswrapper[4684]: E1013 13:23:58.817453 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02bfc416-ee41-4c39-90ff-7debeec43ce5" containerName="neutron-api" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.817466 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="02bfc416-ee41-4c39-90ff-7debeec43ce5" containerName="neutron-api" Oct 13 13:23:58 crc kubenswrapper[4684]: E1013 13:23:58.817479 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" containerName="probe" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.817491 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" containerName="probe" Oct 13 13:23:58 crc kubenswrapper[4684]: E1013 13:23:58.817524 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02bfc416-ee41-4c39-90ff-7debeec43ce5" containerName="neutron-httpd" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.817535 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="02bfc416-ee41-4c39-90ff-7debeec43ce5" containerName="neutron-httpd" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.817863 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="02bfc416-ee41-4c39-90ff-7debeec43ce5" containerName="neutron-api" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.818315 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6ba537f-3303-4324-b704-22ca556a0f30" containerName="dnsmasq-dns" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.818494 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" containerName="cinder-scheduler" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.818511 4684 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" containerName="probe" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.818535 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="02bfc416-ee41-4c39-90ff-7debeec43ce5" containerName="neutron-httpd" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.820279 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.833082 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.841360 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.848851 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-config" (OuterVolumeSpecName: "config") pod "02bfc416-ee41-4c39-90ff-7debeec43ce5" (UID: "02bfc416-ee41-4c39-90ff-7debeec43ce5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.853846 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02bfc416-ee41-4c39-90ff-7debeec43ce5" (UID: "02bfc416-ee41-4c39-90ff-7debeec43ce5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.860158 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br7r5\" (UniqueName: \"kubernetes.io/projected/17d5a5ba-88b1-4769-a5fe-5f950804f332-kube-api-access-br7r5\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.860310 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17d5a5ba-88b1-4769-a5fe-5f950804f332-scripts\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.860336 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17d5a5ba-88b1-4769-a5fe-5f950804f332-config-data\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.860369 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/17d5a5ba-88b1-4769-a5fe-5f950804f332-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.860388 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17d5a5ba-88b1-4769-a5fe-5f950804f332-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " 
pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.860404 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17d5a5ba-88b1-4769-a5fe-5f950804f332-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.860580 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpg2w\" (UniqueName: \"kubernetes.io/projected/02bfc416-ee41-4c39-90ff-7debeec43ce5-kube-api-access-gpg2w\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.860609 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.860621 4684 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.860630 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.861407 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "02bfc416-ee41-4c39-90ff-7debeec43ce5" (UID: "02bfc416-ee41-4c39-90ff-7debeec43ce5"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.962333 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17d5a5ba-88b1-4769-a5fe-5f950804f332-scripts\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.962761 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17d5a5ba-88b1-4769-a5fe-5f950804f332-config-data\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.962799 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/17d5a5ba-88b1-4769-a5fe-5f950804f332-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.962825 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17d5a5ba-88b1-4769-a5fe-5f950804f332-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.962845 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17d5a5ba-88b1-4769-a5fe-5f950804f332-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.962954 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/17d5a5ba-88b1-4769-a5fe-5f950804f332-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.962971 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br7r5\" (UniqueName: \"kubernetes.io/projected/17d5a5ba-88b1-4769-a5fe-5f950804f332-kube-api-access-br7r5\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.963079 4684 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/02bfc416-ee41-4c39-90ff-7debeec43ce5-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.965484 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17d5a5ba-88b1-4769-a5fe-5f950804f332-scripts\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.966243 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17d5a5ba-88b1-4769-a5fe-5f950804f332-config-data-custom\") pod \"cinder-scheduler-0\" (UID: 
\"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.966710 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17d5a5ba-88b1-4769-a5fe-5f950804f332-config-data\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.970444 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17d5a5ba-88b1-4769-a5fe-5f950804f332-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:58 crc kubenswrapper[4684]: I1013 13:23:58.980519 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br7r5\" (UniqueName: \"kubernetes.io/projected/17d5a5ba-88b1-4769-a5fe-5f950804f332-kube-api-access-br7r5\") pod \"cinder-scheduler-0\" (UID: \"17d5a5ba-88b1-4769-a5fe-5f950804f332\") " pod="openstack/cinder-scheduler-0" Oct 13 13:23:59 crc kubenswrapper[4684]: I1013 13:23:59.145989 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 13 13:23:59 crc kubenswrapper[4684]: I1013 13:23:59.457879 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b56fc677b-j59pp" event={"ID":"02bfc416-ee41-4c39-90ff-7debeec43ce5","Type":"ContainerDied","Data":"bf995c528a1c22e147769fd50ed0a26a4133aebe7434a123ce279ce5a7e89f9f"} Oct 13 13:23:59 crc kubenswrapper[4684]: I1013 13:23:59.458006 4684 scope.go:117] "RemoveContainer" containerID="3a538c440199f1d6b939bee45f3df3d443fd3b6c29e047035086bbfa7bfbc86c" Oct 13 13:23:59 crc kubenswrapper[4684]: I1013 13:23:59.458060 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6b56fc677b-j59pp" Oct 13 13:23:59 crc kubenswrapper[4684]: I1013 13:23:59.491451 4684 scope.go:117] "RemoveContainer" containerID="2ae7c125aa1f81711b939cbbc2db105dada3d6a3ec0bdff28919cad1b29460ba" Oct 13 13:23:59 crc kubenswrapper[4684]: I1013 13:23:59.510374 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6b56fc677b-j59pp"] Oct 13 13:23:59 crc kubenswrapper[4684]: I1013 13:23:59.518886 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-6b56fc677b-j59pp"] Oct 13 13:23:59 crc kubenswrapper[4684]: I1013 13:23:59.602649 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 13 13:23:59 crc kubenswrapper[4684]: W1013 13:23:59.619361 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17d5a5ba_88b1_4769_a5fe_5f950804f332.slice/crio-e0c108f11c8895e3b29851f1a09b4c18ae55b8453baa552575eeaa41b7216eb6 WatchSource:0}: Error finding container e0c108f11c8895e3b29851f1a09b4c18ae55b8453baa552575eeaa41b7216eb6: Status 404 returned error can't find the container with id e0c108f11c8895e3b29851f1a09b4c18ae55b8453baa552575eeaa41b7216eb6 Oct 13 13:24:00 crc kubenswrapper[4684]: I1013 13:24:00.367708 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02bfc416-ee41-4c39-90ff-7debeec43ce5" path="/var/lib/kubelet/pods/02bfc416-ee41-4c39-90ff-7debeec43ce5/volumes" Oct 13 13:24:00 crc kubenswrapper[4684]: I1013 13:24:00.368704 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f" path="/var/lib/kubelet/pods/7f4b5f89-ca90-419c-8fa2-96a6d4f8e36f/volumes" Oct 13 13:24:00 crc kubenswrapper[4684]: I1013 13:24:00.471947 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"17d5a5ba-88b1-4769-a5fe-5f950804f332","Type":"ContainerStarted","Data":"47070a9d7a7c02bb4b32b3e0abe6a8738748eedb2ee913f36c12adbf6c5f5735"} Oct 13 13:24:00 crc kubenswrapper[4684]: I1013 13:24:00.471986 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"17d5a5ba-88b1-4769-a5fe-5f950804f332","Type":"ContainerStarted","Data":"e0c108f11c8895e3b29851f1a09b4c18ae55b8453baa552575eeaa41b7216eb6"} Oct 13 13:24:00 crc kubenswrapper[4684]: I1013 13:24:00.835422 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:24:00 crc kubenswrapper[4684]: I1013 13:24:00.836642 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5854cb58cb-4hhrx" Oct 13 13:24:01 crc kubenswrapper[4684]: I1013 13:24:01.379929 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-5c87fbbfdb-hnmd8" Oct 13 13:24:01 crc kubenswrapper[4684]: I1013 13:24:01.482788 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"17d5a5ba-88b1-4769-a5fe-5f950804f332","Type":"ContainerStarted","Data":"78d8bf2432f2e5fd4915c5d581027df02bdaf10d9fa5e5882d388913df3f34e0"} Oct 13 13:24:01 crc kubenswrapper[4684]: I1013 13:24:01.506079 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.506063611 podStartE2EDuration="3.506063611s" podCreationTimestamp="2025-10-13 13:23:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 
UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:24:01.497059683 +0000 UTC m=+996.064443763" watchObservedRunningTime="2025-10-13 13:24:01.506063611 +0000 UTC m=+996.073447681" Oct 13 13:24:01 crc kubenswrapper[4684]: I1013 13:24:01.972175 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.465945 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.467324 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.469887 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.470117 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.482993 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-dcd5w" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.484313 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.588800 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/706a05c9-47ec-4b10-a5de-227f67b3be61-openstack-config\") pod \"openstackclient\" (UID: \"706a05c9-47ec-4b10-a5de-227f67b3be61\") " pod="openstack/openstackclient" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.588849 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/706a05c9-47ec-4b10-a5de-227f67b3be61-openstack-config-secret\") pod \"openstackclient\" (UID: \"706a05c9-47ec-4b10-a5de-227f67b3be61\") " pod="openstack/openstackclient" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.588961 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc6r6\" (UniqueName: \"kubernetes.io/projected/706a05c9-47ec-4b10-a5de-227f67b3be61-kube-api-access-rc6r6\") pod \"openstackclient\" (UID: \"706a05c9-47ec-4b10-a5de-227f67b3be61\") " pod="openstack/openstackclient" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.589019 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/706a05c9-47ec-4b10-a5de-227f67b3be61-combined-ca-bundle\") pod \"openstackclient\" (UID: \"706a05c9-47ec-4b10-a5de-227f67b3be61\") " pod="openstack/openstackclient" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.690929 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/706a05c9-47ec-4b10-a5de-227f67b3be61-openstack-config\") pod \"openstackclient\" (UID: \"706a05c9-47ec-4b10-a5de-227f67b3be61\") " pod="openstack/openstackclient" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.690983 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: 
\"kubernetes.io/secret/706a05c9-47ec-4b10-a5de-227f67b3be61-openstack-config-secret\") pod \"openstackclient\" (UID: \"706a05c9-47ec-4b10-a5de-227f67b3be61\") " pod="openstack/openstackclient" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.691024 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc6r6\" (UniqueName: \"kubernetes.io/projected/706a05c9-47ec-4b10-a5de-227f67b3be61-kube-api-access-rc6r6\") pod \"openstackclient\" (UID: \"706a05c9-47ec-4b10-a5de-227f67b3be61\") " pod="openstack/openstackclient" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.691070 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/706a05c9-47ec-4b10-a5de-227f67b3be61-combined-ca-bundle\") pod \"openstackclient\" (UID: \"706a05c9-47ec-4b10-a5de-227f67b3be61\") " pod="openstack/openstackclient" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.692006 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/706a05c9-47ec-4b10-a5de-227f67b3be61-openstack-config\") pod \"openstackclient\" (UID: \"706a05c9-47ec-4b10-a5de-227f67b3be61\") " pod="openstack/openstackclient" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.697002 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/706a05c9-47ec-4b10-a5de-227f67b3be61-openstack-config-secret\") pod \"openstackclient\" (UID: \"706a05c9-47ec-4b10-a5de-227f67b3be61\") " pod="openstack/openstackclient" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.697657 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/706a05c9-47ec-4b10-a5de-227f67b3be61-combined-ca-bundle\") pod \"openstackclient\" (UID: \"706a05c9-47ec-4b10-a5de-227f67b3be61\") " pod="openstack/openstackclient" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.709891 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc6r6\" (UniqueName: \"kubernetes.io/projected/706a05c9-47ec-4b10-a5de-227f67b3be61-kube-api-access-rc6r6\") pod \"openstackclient\" (UID: \"706a05c9-47ec-4b10-a5de-227f67b3be61\") " pod="openstack/openstackclient" Oct 13 13:24:03 crc kubenswrapper[4684]: I1013 13:24:03.805283 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 13 13:24:04 crc kubenswrapper[4684]: I1013 13:24:04.147438 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 13 13:24:04 crc kubenswrapper[4684]: I1013 13:24:04.252043 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 13 13:24:04 crc kubenswrapper[4684]: I1013 13:24:04.515372 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"706a05c9-47ec-4b10-a5de-227f67b3be61","Type":"ContainerStarted","Data":"21a4e4f0e66541e3f6b1d31fe3047084c24617accdce506a9a0f9fd3817eedd3"} Oct 13 13:24:07 crc kubenswrapper[4684]: I1013 13:24:07.875557 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-56f9bb58f9-k8bsc"] Oct 13 13:24:07 crc kubenswrapper[4684]: I1013 13:24:07.877886 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:07 crc kubenswrapper[4684]: I1013 13:24:07.880192 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 13 13:24:07 crc kubenswrapper[4684]: I1013 13:24:07.880520 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Oct 13 13:24:07 crc kubenswrapper[4684]: I1013 13:24:07.881603 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Oct 13 13:24:07 crc kubenswrapper[4684]: I1013 13:24:07.888128 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-56f9bb58f9-k8bsc"] Oct 13 13:24:07 crc kubenswrapper[4684]: I1013 13:24:07.969171 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnwls\" (UniqueName: \"kubernetes.io/projected/61ca624d-dfba-4a64-b08f-e96cc583a2b8-kube-api-access-cnwls\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:07 crc kubenswrapper[4684]: I1013 13:24:07.969225 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/61ca624d-dfba-4a64-b08f-e96cc583a2b8-log-httpd\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:07 crc kubenswrapper[4684]: I1013 13:24:07.969395 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/61ca624d-dfba-4a64-b08f-e96cc583a2b8-etc-swift\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:07 crc kubenswrapper[4684]: I1013 13:24:07.969620 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61ca624d-dfba-4a64-b08f-e96cc583a2b8-config-data\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:07 crc kubenswrapper[4684]: I1013 13:24:07.969657 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61ca624d-dfba-4a64-b08f-e96cc583a2b8-combined-ca-bundle\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:07 crc kubenswrapper[4684]: I1013 13:24:07.969711 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/61ca624d-dfba-4a64-b08f-e96cc583a2b8-run-httpd\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:07 crc kubenswrapper[4684]: I1013 13:24:07.969867 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/61ca624d-dfba-4a64-b08f-e96cc583a2b8-public-tls-certs\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " 
pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:07 crc kubenswrapper[4684]: I1013 13:24:07.969990 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/61ca624d-dfba-4a64-b08f-e96cc583a2b8-internal-tls-certs\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.071158 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61ca624d-dfba-4a64-b08f-e96cc583a2b8-config-data\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.071197 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61ca624d-dfba-4a64-b08f-e96cc583a2b8-combined-ca-bundle\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.071219 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/61ca624d-dfba-4a64-b08f-e96cc583a2b8-run-httpd\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.071259 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/61ca624d-dfba-4a64-b08f-e96cc583a2b8-public-tls-certs\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.071286 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/61ca624d-dfba-4a64-b08f-e96cc583a2b8-internal-tls-certs\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.071334 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnwls\" (UniqueName: \"kubernetes.io/projected/61ca624d-dfba-4a64-b08f-e96cc583a2b8-kube-api-access-cnwls\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.071350 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/61ca624d-dfba-4a64-b08f-e96cc583a2b8-log-httpd\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.071385 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/61ca624d-dfba-4a64-b08f-e96cc583a2b8-etc-swift\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " 
pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.071825 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/61ca624d-dfba-4a64-b08f-e96cc583a2b8-run-httpd\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.072189 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/61ca624d-dfba-4a64-b08f-e96cc583a2b8-log-httpd\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.079018 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/61ca624d-dfba-4a64-b08f-e96cc583a2b8-etc-swift\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.080134 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/61ca624d-dfba-4a64-b08f-e96cc583a2b8-internal-tls-certs\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.081612 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61ca624d-dfba-4a64-b08f-e96cc583a2b8-config-data\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.081619 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/61ca624d-dfba-4a64-b08f-e96cc583a2b8-public-tls-certs\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.082224 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61ca624d-dfba-4a64-b08f-e96cc583a2b8-combined-ca-bundle\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.088683 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnwls\" (UniqueName: \"kubernetes.io/projected/61ca624d-dfba-4a64-b08f-e96cc583a2b8-kube-api-access-cnwls\") pod \"swift-proxy-56f9bb58f9-k8bsc\" (UID: \"61ca624d-dfba-4a64-b08f-e96cc583a2b8\") " pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.216360 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:08 crc kubenswrapper[4684]: I1013 13:24:08.741893 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-56f9bb58f9-k8bsc"] Oct 13 13:24:08 crc kubenswrapper[4684]: W1013 13:24:08.744871 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61ca624d_dfba_4a64_b08f_e96cc583a2b8.slice/crio-1eea321bc278160ed406941e5e20a454d9aab5cc495af303e9e6c405b352d01d WatchSource:0}: Error finding container 1eea321bc278160ed406941e5e20a454d9aab5cc495af303e9e6c405b352d01d: Status 404 returned error can't find the container with id 1eea321bc278160ed406941e5e20a454d9aab5cc495af303e9e6c405b352d01d Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.244386 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.245864 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="2ffb05bd-2aa7-44dd-a791-859bf7b47747" containerName="glance-log" containerID="cri-o://01dfdd6bd7ba8cdb4ac3f7d9c19e722938ed613611b99affe81a6e24097183f6" gracePeriod=30 Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.246010 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="2ffb05bd-2aa7-44dd-a791-859bf7b47747" containerName="glance-httpd" containerID="cri-o://f90f04890d5085d43859257a2af92284eedd52346c1b2550a194f0d9b964f609" gracePeriod=30 Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.391760 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.392073 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerName="ceilometer-central-agent" containerID="cri-o://fee5531fd6f9aa0296eedd00fbd369fe6d94645d5638b62aef8b0bc6b67c3ae7" gracePeriod=30 Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.392140 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerName="proxy-httpd" containerID="cri-o://f448da61456e3820c532c93022e553e5fdd5d29a59b840ff39f514334d7c821f" gracePeriod=30 Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.392192 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerName="sg-core" containerID="cri-o://d00f1d8dfbe3014f240aed3605e4b80c496c7b7d08a8255f7a51f3e8b87845a3" gracePeriod=30 Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.392242 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerName="ceilometer-notification-agent" containerID="cri-o://c41a6faeddb50508f6b88725e895922257647635549bbc91beadcd5c799011ea" gracePeriod=30 Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.426887 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.497251 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" 
podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.161:3000/\": read tcp 10.217.0.2:38916->10.217.0.161:3000: read: connection reset by peer" Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.562149 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-56f9bb58f9-k8bsc" event={"ID":"61ca624d-dfba-4a64-b08f-e96cc583a2b8","Type":"ContainerStarted","Data":"fd8d0fdf29b77495ccbd182760124eb95e2e601dbdd5b352481c7a2bc7edc0f2"} Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.562196 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-56f9bb58f9-k8bsc" event={"ID":"61ca624d-dfba-4a64-b08f-e96cc583a2b8","Type":"ContainerStarted","Data":"a47be16627b7322602668aeee1c87ab266159f22e21cfe1c71ffff46e85da01e"} Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.562206 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-56f9bb58f9-k8bsc" event={"ID":"61ca624d-dfba-4a64-b08f-e96cc583a2b8","Type":"ContainerStarted","Data":"1eea321bc278160ed406941e5e20a454d9aab5cc495af303e9e6c405b352d01d"} Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.562332 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.564772 4684 generic.go:334] "Generic (PLEG): container finished" podID="2ffb05bd-2aa7-44dd-a791-859bf7b47747" containerID="01dfdd6bd7ba8cdb4ac3f7d9c19e722938ed613611b99affe81a6e24097183f6" exitCode=143 Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.564834 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2ffb05bd-2aa7-44dd-a791-859bf7b47747","Type":"ContainerDied","Data":"01dfdd6bd7ba8cdb4ac3f7d9c19e722938ed613611b99affe81a6e24097183f6"} Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.570077 4684 generic.go:334] "Generic (PLEG): container finished" podID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerID="f448da61456e3820c532c93022e553e5fdd5d29a59b840ff39f514334d7c821f" exitCode=0 Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.570109 4684 generic.go:334] "Generic (PLEG): container finished" podID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerID="d00f1d8dfbe3014f240aed3605e4b80c496c7b7d08a8255f7a51f3e8b87845a3" exitCode=2 Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.570133 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72ac328b-e864-4a16-954e-52fc0e0972e5","Type":"ContainerDied","Data":"f448da61456e3820c532c93022e553e5fdd5d29a59b840ff39f514334d7c821f"} Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.570161 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72ac328b-e864-4a16-954e-52fc0e0972e5","Type":"ContainerDied","Data":"d00f1d8dfbe3014f240aed3605e4b80c496c7b7d08a8255f7a51f3e8b87845a3"} Oct 13 13:24:09 crc kubenswrapper[4684]: I1013 13:24:09.584067 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-56f9bb58f9-k8bsc" podStartSLOduration=2.584046484 podStartE2EDuration="2.584046484s" podCreationTimestamp="2025-10-13 13:24:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:24:09.579996435 +0000 UTC m=+1004.147380515" watchObservedRunningTime="2025-10-13 13:24:09.584046484 
+0000 UTC m=+1004.151430554" Oct 13 13:24:10 crc kubenswrapper[4684]: I1013 13:24:10.591758 4684 generic.go:334] "Generic (PLEG): container finished" podID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerID="fee5531fd6f9aa0296eedd00fbd369fe6d94645d5638b62aef8b0bc6b67c3ae7" exitCode=0 Oct 13 13:24:10 crc kubenswrapper[4684]: I1013 13:24:10.591842 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72ac328b-e864-4a16-954e-52fc0e0972e5","Type":"ContainerDied","Data":"fee5531fd6f9aa0296eedd00fbd369fe6d94645d5638b62aef8b0bc6b67c3ae7"} Oct 13 13:24:10 crc kubenswrapper[4684]: I1013 13:24:10.592355 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:11 crc kubenswrapper[4684]: I1013 13:24:11.613085 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 13:24:11 crc kubenswrapper[4684]: I1013 13:24:11.613351 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4e4b1e89-804b-413c-8d0d-7f89ce8f715d" containerName="glance-log" containerID="cri-o://da779bff9234e8b2b0df6f3e100e87fde96f281ff3439e27f903f2341ab4d3a8" gracePeriod=30 Oct 13 13:24:11 crc kubenswrapper[4684]: I1013 13:24:11.613836 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4e4b1e89-804b-413c-8d0d-7f89ce8f715d" containerName="glance-httpd" containerID="cri-o://ee6936771997352ccf4f2df55b3b9633e81b83f856e1065eb37b4c3b1b8ddc03" gracePeriod=30 Oct 13 13:24:12 crc kubenswrapper[4684]: I1013 13:24:12.616739 4684 generic.go:334] "Generic (PLEG): container finished" podID="4e4b1e89-804b-413c-8d0d-7f89ce8f715d" containerID="da779bff9234e8b2b0df6f3e100e87fde96f281ff3439e27f903f2341ab4d3a8" exitCode=143 Oct 13 13:24:12 crc kubenswrapper[4684]: I1013 13:24:12.616812 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e4b1e89-804b-413c-8d0d-7f89ce8f715d","Type":"ContainerDied","Data":"da779bff9234e8b2b0df6f3e100e87fde96f281ff3439e27f903f2341ab4d3a8"} Oct 13 13:24:12 crc kubenswrapper[4684]: I1013 13:24:12.623604 4684 generic.go:334] "Generic (PLEG): container finished" podID="2ffb05bd-2aa7-44dd-a791-859bf7b47747" containerID="f90f04890d5085d43859257a2af92284eedd52346c1b2550a194f0d9b964f609" exitCode=0 Oct 13 13:24:12 crc kubenswrapper[4684]: I1013 13:24:12.623651 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2ffb05bd-2aa7-44dd-a791-859bf7b47747","Type":"ContainerDied","Data":"f90f04890d5085d43859257a2af92284eedd52346c1b2550a194f0d9b964f609"} Oct 13 13:24:13 crc kubenswrapper[4684]: I1013 13:24:13.229654 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:13 crc kubenswrapper[4684]: I1013 13:24:13.636474 4684 generic.go:334] "Generic (PLEG): container finished" podID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerID="c41a6faeddb50508f6b88725e895922257647635549bbc91beadcd5c799011ea" exitCode=0 Oct 13 13:24:13 crc kubenswrapper[4684]: I1013 13:24:13.636517 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72ac328b-e864-4a16-954e-52fc0e0972e5","Type":"ContainerDied","Data":"c41a6faeddb50508f6b88725e895922257647635549bbc91beadcd5c799011ea"} Oct 13 13:24:14 crc 
kubenswrapper[4684]: I1013 13:24:14.756084 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="4e4b1e89-804b-413c-8d0d-7f89ce8f715d" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.147:9292/healthcheck\": read tcp 10.217.0.2:53590->10.217.0.147:9292: read: connection reset by peer" Oct 13 13:24:14 crc kubenswrapper[4684]: I1013 13:24:14.756183 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="4e4b1e89-804b-413c-8d0d-7f89ce8f715d" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.147:9292/healthcheck\": read tcp 10.217.0.2:53584->10.217.0.147:9292: read: connection reset by peer" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.653377 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.674287 4684 generic.go:334] "Generic (PLEG): container finished" podID="4e4b1e89-804b-413c-8d0d-7f89ce8f715d" containerID="ee6936771997352ccf4f2df55b3b9633e81b83f856e1065eb37b4c3b1b8ddc03" exitCode=0 Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.674363 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e4b1e89-804b-413c-8d0d-7f89ce8f715d","Type":"ContainerDied","Data":"ee6936771997352ccf4f2df55b3b9633e81b83f856e1065eb37b4c3b1b8ddc03"} Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.696694 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72ac328b-e864-4a16-954e-52fc0e0972e5","Type":"ContainerDied","Data":"1e1888132d74d5120b37f08ebdc58f4e8d41bd4dc6e40efd8bbd97300161fee0"} Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.696742 4684 scope.go:117] "RemoveContainer" containerID="f448da61456e3820c532c93022e553e5fdd5d29a59b840ff39f514334d7c821f" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.696876 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.716694 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"706a05c9-47ec-4b10-a5de-227f67b3be61","Type":"ContainerStarted","Data":"d23060b245e1afab8ba7b0fd3b36ce4320466884b70a861c6d7cc6a248683f5c"} Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.735505 4684 scope.go:117] "RemoveContainer" containerID="d00f1d8dfbe3014f240aed3605e4b80c496c7b7d08a8255f7a51f3e8b87845a3" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.748836 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.591259266 podStartE2EDuration="12.748817057s" podCreationTimestamp="2025-10-13 13:24:03 +0000 UTC" firstStartedPulling="2025-10-13 13:24:04.257030174 +0000 UTC m=+998.824414244" lastFinishedPulling="2025-10-13 13:24:15.414587955 +0000 UTC m=+1009.981972035" observedRunningTime="2025-10-13 13:24:15.738110885 +0000 UTC m=+1010.305494975" watchObservedRunningTime="2025-10-13 13:24:15.748817057 +0000 UTC m=+1010.316201127" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.776061 4684 scope.go:117] "RemoveContainer" containerID="c41a6faeddb50508f6b88725e895922257647635549bbc91beadcd5c799011ea" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.800954 4684 scope.go:117] "RemoveContainer" containerID="fee5531fd6f9aa0296eedd00fbd369fe6d94645d5638b62aef8b0bc6b67c3ae7" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.814508 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.826912 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-sg-core-conf-yaml\") pod \"72ac328b-e864-4a16-954e-52fc0e0972e5\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.827045 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98l94\" (UniqueName: \"kubernetes.io/projected/72ac328b-e864-4a16-954e-52fc0e0972e5-kube-api-access-98l94\") pod \"72ac328b-e864-4a16-954e-52fc0e0972e5\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.827105 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-scripts\") pod \"72ac328b-e864-4a16-954e-52fc0e0972e5\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.827205 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72ac328b-e864-4a16-954e-52fc0e0972e5-run-httpd\") pod \"72ac328b-e864-4a16-954e-52fc0e0972e5\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.827225 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-config-data\") pod \"72ac328b-e864-4a16-954e-52fc0e0972e5\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.827276 4684 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72ac328b-e864-4a16-954e-52fc0e0972e5-log-httpd\") pod \"72ac328b-e864-4a16-954e-52fc0e0972e5\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.827336 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-combined-ca-bundle\") pod \"72ac328b-e864-4a16-954e-52fc0e0972e5\" (UID: \"72ac328b-e864-4a16-954e-52fc0e0972e5\") " Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.828290 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72ac328b-e864-4a16-954e-52fc0e0972e5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "72ac328b-e864-4a16-954e-52fc0e0972e5" (UID: "72ac328b-e864-4a16-954e-52fc0e0972e5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.828501 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72ac328b-e864-4a16-954e-52fc0e0972e5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "72ac328b-e864-4a16-954e-52fc0e0972e5" (UID: "72ac328b-e864-4a16-954e-52fc0e0972e5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.853094 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-scripts" (OuterVolumeSpecName: "scripts") pod "72ac328b-e864-4a16-954e-52fc0e0972e5" (UID: "72ac328b-e864-4a16-954e-52fc0e0972e5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.862870 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72ac328b-e864-4a16-954e-52fc0e0972e5-kube-api-access-98l94" (OuterVolumeSpecName: "kube-api-access-98l94") pod "72ac328b-e864-4a16-954e-52fc0e0972e5" (UID: "72ac328b-e864-4a16-954e-52fc0e0972e5"). InnerVolumeSpecName "kube-api-access-98l94". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.869227 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "72ac328b-e864-4a16-954e-52fc0e0972e5" (UID: "72ac328b-e864-4a16-954e-52fc0e0972e5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.916467 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "72ac328b-e864-4a16-954e-52fc0e0972e5" (UID: "72ac328b-e864-4a16-954e-52fc0e0972e5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.928774 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-combined-ca-bundle\") pod \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.928970 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.929046 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-logs\") pod \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.929093 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-httpd-run\") pod \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.929125 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvphh\" (UniqueName: \"kubernetes.io/projected/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-kube-api-access-pvphh\") pod \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.929163 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-config-data\") pod \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.929252 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-scripts\") pod \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.929322 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-internal-tls-certs\") pod \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\" (UID: \"4e4b1e89-804b-413c-8d0d-7f89ce8f715d\") " Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.929799 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4e4b1e89-804b-413c-8d0d-7f89ce8f715d" (UID: "4e4b1e89-804b-413c-8d0d-7f89ce8f715d"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.930210 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-logs" (OuterVolumeSpecName: "logs") pod "4e4b1e89-804b-413c-8d0d-7f89ce8f715d" (UID: "4e4b1e89-804b-413c-8d0d-7f89ce8f715d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.931306 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.931468 4684 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-logs\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.932084 4684 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.932193 4684 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.932669 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98l94\" (UniqueName: \"kubernetes.io/projected/72ac328b-e864-4a16-954e-52fc0e0972e5-kube-api-access-98l94\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.932777 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.932874 4684 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72ac328b-e864-4a16-954e-52fc0e0972e5-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.932976 4684 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72ac328b-e864-4a16-954e-52fc0e0972e5-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.934158 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-scripts" (OuterVolumeSpecName: "scripts") pod "4e4b1e89-804b-413c-8d0d-7f89ce8f715d" (UID: "4e4b1e89-804b-413c-8d0d-7f89ce8f715d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.937124 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-kube-api-access-pvphh" (OuterVolumeSpecName: "kube-api-access-pvphh") pod "4e4b1e89-804b-413c-8d0d-7f89ce8f715d" (UID: "4e4b1e89-804b-413c-8d0d-7f89ce8f715d"). InnerVolumeSpecName "kube-api-access-pvphh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.941029 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "4e4b1e89-804b-413c-8d0d-7f89ce8f715d" (UID: "4e4b1e89-804b-413c-8d0d-7f89ce8f715d"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.955092 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e4b1e89-804b-413c-8d0d-7f89ce8f715d" (UID: "4e4b1e89-804b-413c-8d0d-7f89ce8f715d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.975520 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-config-data" (OuterVolumeSpecName: "config-data") pod "4e4b1e89-804b-413c-8d0d-7f89ce8f715d" (UID: "4e4b1e89-804b-413c-8d0d-7f89ce8f715d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.990561 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4e4b1e89-804b-413c-8d0d-7f89ce8f715d" (UID: "4e4b1e89-804b-413c-8d0d-7f89ce8f715d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:24:15 crc kubenswrapper[4684]: I1013 13:24:15.994399 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-config-data" (OuterVolumeSpecName: "config-data") pod "72ac328b-e864-4a16-954e-52fc0e0972e5" (UID: "72ac328b-e864-4a16-954e-52fc0e0972e5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.034808 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.036103 4684 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.036376 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvphh\" (UniqueName: \"kubernetes.io/projected/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-kube-api-access-pvphh\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.036393 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.036404 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.037578 4684 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.037640 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e4b1e89-804b-413c-8d0d-7f89ce8f715d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.037651 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72ac328b-e864-4a16-954e-52fc0e0972e5-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.051181 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.058751 4684 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.059187 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:24:16 crc kubenswrapper[4684]: E1013 13:24:16.059647 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e4b1e89-804b-413c-8d0d-7f89ce8f715d" containerName="glance-httpd" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.059665 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e4b1e89-804b-413c-8d0d-7f89ce8f715d" containerName="glance-httpd" Oct 13 13:24:16 crc kubenswrapper[4684]: E1013 13:24:16.059685 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerName="proxy-httpd" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.059694 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerName="proxy-httpd" Oct 13 13:24:16 crc kubenswrapper[4684]: E1013 13:24:16.059729 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" 
containerName="ceilometer-notification-agent" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.059737 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerName="ceilometer-notification-agent" Oct 13 13:24:16 crc kubenswrapper[4684]: E1013 13:24:16.059758 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerName="sg-core" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.059765 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerName="sg-core" Oct 13 13:24:16 crc kubenswrapper[4684]: E1013 13:24:16.059776 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e4b1e89-804b-413c-8d0d-7f89ce8f715d" containerName="glance-log" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.059786 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e4b1e89-804b-413c-8d0d-7f89ce8f715d" containerName="glance-log" Oct 13 13:24:16 crc kubenswrapper[4684]: E1013 13:24:16.059796 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerName="ceilometer-central-agent" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.059804 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerName="ceilometer-central-agent" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.060021 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerName="sg-core" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.060038 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerName="proxy-httpd" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.060053 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerName="ceilometer-central-agent" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.060071 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" containerName="ceilometer-notification-agent" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.060084 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e4b1e89-804b-413c-8d0d-7f89ce8f715d" containerName="glance-log" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.060102 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e4b1e89-804b-413c-8d0d-7f89ce8f715d" containerName="glance-httpd" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.062169 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.065591 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.066429 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.066594 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.139195 4684 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.241754 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.241863 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg9h8\" (UniqueName: \"kubernetes.io/projected/28d8aeac-fcad-4ea8-955a-b7bf68921f86-kube-api-access-kg9h8\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.241999 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-scripts\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.242076 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-config-data\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.242113 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.242360 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28d8aeac-fcad-4ea8-955a-b7bf68921f86-log-httpd\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.242520 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28d8aeac-fcad-4ea8-955a-b7bf68921f86-run-httpd\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.344082 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/28d8aeac-fcad-4ea8-955a-b7bf68921f86-log-httpd\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.344776 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28d8aeac-fcad-4ea8-955a-b7bf68921f86-run-httpd\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.344859 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.344961 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg9h8\" (UniqueName: \"kubernetes.io/projected/28d8aeac-fcad-4ea8-955a-b7bf68921f86-kube-api-access-kg9h8\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.345005 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-scripts\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.345092 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-config-data\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.345130 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.344672 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28d8aeac-fcad-4ea8-955a-b7bf68921f86-log-httpd\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.346780 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28d8aeac-fcad-4ea8-955a-b7bf68921f86-run-httpd\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.350053 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-scripts\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.350320 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-config-data\") 
pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.350831 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.356229 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.365461 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg9h8\" (UniqueName: \"kubernetes.io/projected/28d8aeac-fcad-4ea8-955a-b7bf68921f86-kube-api-access-kg9h8\") pod \"ceilometer-0\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.366413 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72ac328b-e864-4a16-954e-52fc0e0972e5" path="/var/lib/kubelet/pods/72ac328b-e864-4a16-954e-52fc0e0972e5/volumes" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.428536 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.727260 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e4b1e89-804b-413c-8d0d-7f89ce8f715d","Type":"ContainerDied","Data":"9e6cf633fcb92267bae82df47d90c9dca6108ade12a12d8717764fb5dbc0d95d"} Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.727582 4684 scope.go:117] "RemoveContainer" containerID="ee6936771997352ccf4f2df55b3b9633e81b83f856e1065eb37b4c3b1b8ddc03" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.727688 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.757939 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.776433 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.779585 4684 scope.go:117] "RemoveContainer" containerID="da779bff9234e8b2b0df6f3e100e87fde96f281ff3439e27f903f2341ab4d3a8" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.804011 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.812963 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.817964 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.821772 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.833296 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.901621 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.942750 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.959231 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/615ce132-dbef-40c4-afd3-871c94b552ed-config-data\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.959282 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/615ce132-dbef-40c4-afd3-871c94b552ed-scripts\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.959313 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/615ce132-dbef-40c4-afd3-871c94b552ed-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.959349 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drwwg\" (UniqueName: \"kubernetes.io/projected/615ce132-dbef-40c4-afd3-871c94b552ed-kube-api-access-drwwg\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.959364 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/615ce132-dbef-40c4-afd3-871c94b552ed-logs\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.959517 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/615ce132-dbef-40c4-afd3-871c94b552ed-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.959694 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:16 crc kubenswrapper[4684]: I1013 13:24:16.959998 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/615ce132-dbef-40c4-afd3-871c94b552ed-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.063070 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-combined-ca-bundle\") pod \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.063134 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-config-data\") pod \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.063234 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.063261 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-public-tls-certs\") pod \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.063292 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-scripts\") pod \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.063322 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ffb05bd-2aa7-44dd-a791-859bf7b47747-httpd-run\") pod \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.063339 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ffb05bd-2aa7-44dd-a791-859bf7b47747-logs\") pod \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.063416 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7tcjv\" (UniqueName: \"kubernetes.io/projected/2ffb05bd-2aa7-44dd-a791-859bf7b47747-kube-api-access-7tcjv\") pod \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\" (UID: \"2ffb05bd-2aa7-44dd-a791-859bf7b47747\") " Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.063707 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/615ce132-dbef-40c4-afd3-871c94b552ed-config-data\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.063771 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/615ce132-dbef-40c4-afd3-871c94b552ed-scripts\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.063813 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/615ce132-dbef-40c4-afd3-871c94b552ed-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.063838 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drwwg\" (UniqueName: \"kubernetes.io/projected/615ce132-dbef-40c4-afd3-871c94b552ed-kube-api-access-drwwg\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.063860 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/615ce132-dbef-40c4-afd3-871c94b552ed-logs\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.063917 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/615ce132-dbef-40c4-afd3-871c94b552ed-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.063982 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.064063 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/615ce132-dbef-40c4-afd3-871c94b552ed-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.069202 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ffb05bd-2aa7-44dd-a791-859bf7b47747-kube-api-access-7tcjv" (OuterVolumeSpecName: "kube-api-access-7tcjv") pod "2ffb05bd-2aa7-44dd-a791-859bf7b47747" (UID: "2ffb05bd-2aa7-44dd-a791-859bf7b47747"). InnerVolumeSpecName "kube-api-access-7tcjv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.069562 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/615ce132-dbef-40c4-afd3-871c94b552ed-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.069952 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/615ce132-dbef-40c4-afd3-871c94b552ed-logs\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.070236 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/615ce132-dbef-40c4-afd3-871c94b552ed-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.070395 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.073071 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-scripts" (OuterVolumeSpecName: "scripts") pod "2ffb05bd-2aa7-44dd-a791-859bf7b47747" (UID: "2ffb05bd-2aa7-44dd-a791-859bf7b47747"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.073378 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ffb05bd-2aa7-44dd-a791-859bf7b47747-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "2ffb05bd-2aa7-44dd-a791-859bf7b47747" (UID: "2ffb05bd-2aa7-44dd-a791-859bf7b47747"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.073674 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ffb05bd-2aa7-44dd-a791-859bf7b47747-logs" (OuterVolumeSpecName: "logs") pod "2ffb05bd-2aa7-44dd-a791-859bf7b47747" (UID: "2ffb05bd-2aa7-44dd-a791-859bf7b47747"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.077827 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/615ce132-dbef-40c4-afd3-871c94b552ed-scripts\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.081273 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "2ffb05bd-2aa7-44dd-a791-859bf7b47747" (UID: "2ffb05bd-2aa7-44dd-a791-859bf7b47747"). 
InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.083851 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/615ce132-dbef-40c4-afd3-871c94b552ed-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.090779 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/615ce132-dbef-40c4-afd3-871c94b552ed-config-data\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.106673 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drwwg\" (UniqueName: \"kubernetes.io/projected/615ce132-dbef-40c4-afd3-871c94b552ed-kube-api-access-drwwg\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.123355 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ffb05bd-2aa7-44dd-a791-859bf7b47747" (UID: "2ffb05bd-2aa7-44dd-a791-859bf7b47747"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.128996 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"615ce132-dbef-40c4-afd3-871c94b552ed\") " pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.148508 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-config-data" (OuterVolumeSpecName: "config-data") pod "2ffb05bd-2aa7-44dd-a791-859bf7b47747" (UID: "2ffb05bd-2aa7-44dd-a791-859bf7b47747"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.165446 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.165478 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.165506 4684 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.165526 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.165540 4684 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2ffb05bd-2aa7-44dd-a791-859bf7b47747-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.165555 4684 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ffb05bd-2aa7-44dd-a791-859bf7b47747-logs\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.165565 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7tcjv\" (UniqueName: \"kubernetes.io/projected/2ffb05bd-2aa7-44dd-a791-859bf7b47747-kube-api-access-7tcjv\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.170585 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.192681 4684 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.202132 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2ffb05bd-2aa7-44dd-a791-859bf7b47747" (UID: "2ffb05bd-2aa7-44dd-a791-859bf7b47747"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.267327 4684 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.267360 4684 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ffb05bd-2aa7-44dd-a791-859bf7b47747-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.421917 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.746255 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28d8aeac-fcad-4ea8-955a-b7bf68921f86","Type":"ContainerStarted","Data":"dc5ba28514cce5c78b633c0abdecf34948795ce95a78f5bb929b98e172756f8a"} Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.746603 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28d8aeac-fcad-4ea8-955a-b7bf68921f86","Type":"ContainerStarted","Data":"20d26a9c47a4b6f56e9500fd68a65c7c71d00da993afe5cf10b68f6ed93b8259"} Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.747994 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.748018 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2ffb05bd-2aa7-44dd-a791-859bf7b47747","Type":"ContainerDied","Data":"b9363b494822683532faf3efccb587c1e81e6eca30fa3a45216b692c2e18ea20"} Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.748086 4684 scope.go:117] "RemoveContainer" containerID="f90f04890d5085d43859257a2af92284eedd52346c1b2550a194f0d9b964f609" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.770394 4684 scope.go:117] "RemoveContainer" containerID="01dfdd6bd7ba8cdb4ac3f7d9c19e722938ed613611b99affe81a6e24097183f6" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.784794 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.793508 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.831701 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 13:24:17 crc kubenswrapper[4684]: W1013 13:24:17.838915 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod615ce132_dbef_40c4_afd3_871c94b552ed.slice/crio-75b113b8a061a9e3fd31506bbf6462e833877780febebfb3757cde2bc5a80ddc WatchSource:0}: Error finding container 75b113b8a061a9e3fd31506bbf6462e833877780febebfb3757cde2bc5a80ddc: Status 404 returned error can't find the container with id 75b113b8a061a9e3fd31506bbf6462e833877780febebfb3757cde2bc5a80ddc Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.850816 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 13:24:17 crc kubenswrapper[4684]: E1013 13:24:17.851253 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ffb05bd-2aa7-44dd-a791-859bf7b47747" 
containerName="glance-httpd" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.851271 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ffb05bd-2aa7-44dd-a791-859bf7b47747" containerName="glance-httpd" Oct 13 13:24:17 crc kubenswrapper[4684]: E1013 13:24:17.851288 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ffb05bd-2aa7-44dd-a791-859bf7b47747" containerName="glance-log" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.851295 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ffb05bd-2aa7-44dd-a791-859bf7b47747" containerName="glance-log" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.851455 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ffb05bd-2aa7-44dd-a791-859bf7b47747" containerName="glance-log" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.851475 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ffb05bd-2aa7-44dd-a791-859bf7b47747" containerName="glance-httpd" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.852453 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.854025 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.855415 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.877489 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.979066 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.979141 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80891967-a94b-4146-b440-cc217b235eee-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.979200 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/80891967-a94b-4146-b440-cc217b235eee-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.979226 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80891967-a94b-4146-b440-cc217b235eee-config-data\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.979288 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/80891967-a94b-4146-b440-cc217b235eee-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.979350 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q45t\" (UniqueName: \"kubernetes.io/projected/80891967-a94b-4146-b440-cc217b235eee-kube-api-access-5q45t\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.979373 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80891967-a94b-4146-b440-cc217b235eee-scripts\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:17 crc kubenswrapper[4684]: I1013 13:24:17.979448 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80891967-a94b-4146-b440-cc217b235eee-logs\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.080688 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q45t\" (UniqueName: \"kubernetes.io/projected/80891967-a94b-4146-b440-cc217b235eee-kube-api-access-5q45t\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.080729 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80891967-a94b-4146-b440-cc217b235eee-scripts\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.080781 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80891967-a94b-4146-b440-cc217b235eee-logs\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.080810 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.080843 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80891967-a94b-4146-b440-cc217b235eee-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.080882 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/80891967-a94b-4146-b440-cc217b235eee-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.080918 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80891967-a94b-4146-b440-cc217b235eee-config-data\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.080952 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/80891967-a94b-4146-b440-cc217b235eee-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.081407 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/80891967-a94b-4146-b440-cc217b235eee-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.081715 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80891967-a94b-4146-b440-cc217b235eee-logs\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.082004 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.086290 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80891967-a94b-4146-b440-cc217b235eee-config-data\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.087831 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80891967-a94b-4146-b440-cc217b235eee-scripts\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.088855 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80891967-a94b-4146-b440-cc217b235eee-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.093507 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/80891967-a94b-4146-b440-cc217b235eee-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: 
\"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.107561 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q45t\" (UniqueName: \"kubernetes.io/projected/80891967-a94b-4146-b440-cc217b235eee-kube-api-access-5q45t\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.114541 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"80891967-a94b-4146-b440-cc217b235eee\") " pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.201176 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.225039 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-56f9bb58f9-k8bsc" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.383467 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ffb05bd-2aa7-44dd-a791-859bf7b47747" path="/var/lib/kubelet/pods/2ffb05bd-2aa7-44dd-a791-859bf7b47747/volumes" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.385082 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e4b1e89-804b-413c-8d0d-7f89ce8f715d" path="/var/lib/kubelet/pods/4e4b1e89-804b-413c-8d0d-7f89ce8f715d/volumes" Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.766541 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28d8aeac-fcad-4ea8-955a-b7bf68921f86","Type":"ContainerStarted","Data":"1028fe3388a9a4800629f746dc8cc6ef4388fcb56369c6bc4824ef5217b29710"} Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.767075 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28d8aeac-fcad-4ea8-955a-b7bf68921f86","Type":"ContainerStarted","Data":"450f6ebe4b5c9af4d3aad553ce3ec158c19a47a9309cfa643263a31e41cec14c"} Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.769982 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"615ce132-dbef-40c4-afd3-871c94b552ed","Type":"ContainerStarted","Data":"2a6050291f43a316b7300d462e74f97f58b148f0cb6f94f642b0cafc433bea0f"} Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.770013 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"615ce132-dbef-40c4-afd3-871c94b552ed","Type":"ContainerStarted","Data":"75b113b8a061a9e3fd31506bbf6462e833877780febebfb3757cde2bc5a80ddc"} Oct 13 13:24:18 crc kubenswrapper[4684]: I1013 13:24:18.810093 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 13:24:19 crc kubenswrapper[4684]: I1013 13:24:19.817576 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"80891967-a94b-4146-b440-cc217b235eee","Type":"ContainerStarted","Data":"4812013fdf04b1435aaff532e99c957afad6773b2ebd2c89e5ce08e4a03f441f"} Oct 13 13:24:19 crc kubenswrapper[4684]: I1013 13:24:19.819041 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-external-api-0" event={"ID":"80891967-a94b-4146-b440-cc217b235eee","Type":"ContainerStarted","Data":"f458c19aaaa6cbd4a0a1f02d532a5cf01ca4c45e2e1ecfdfb54daca29439e3c3"} Oct 13 13:24:19 crc kubenswrapper[4684]: I1013 13:24:19.827587 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"615ce132-dbef-40c4-afd3-871c94b552ed","Type":"ContainerStarted","Data":"0d4ab03882da759feb943643cf8aa134b1f59af08f5a496f9c1e4755d78a7b87"} Oct 13 13:24:19 crc kubenswrapper[4684]: I1013 13:24:19.860890 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.860871459 podStartE2EDuration="3.860871459s" podCreationTimestamp="2025-10-13 13:24:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:24:19.844375173 +0000 UTC m=+1014.411759243" watchObservedRunningTime="2025-10-13 13:24:19.860871459 +0000 UTC m=+1014.428255529" Oct 13 13:24:20 crc kubenswrapper[4684]: I1013 13:24:20.838188 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"80891967-a94b-4146-b440-cc217b235eee","Type":"ContainerStarted","Data":"a35e88f1582ea9a739dee959642d4651c11b4f634fa12de0c03c240049eee70c"} Oct 13 13:24:20 crc kubenswrapper[4684]: I1013 13:24:20.841502 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28d8aeac-fcad-4ea8-955a-b7bf68921f86","Type":"ContainerStarted","Data":"ccccde0129d1114a5a34b024d288962322c52a06e855bda3f2891348857709d7"} Oct 13 13:24:20 crc kubenswrapper[4684]: I1013 13:24:20.841716 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerName="ceilometer-central-agent" containerID="cri-o://dc5ba28514cce5c78b633c0abdecf34948795ce95a78f5bb929b98e172756f8a" gracePeriod=30 Oct 13 13:24:20 crc kubenswrapper[4684]: I1013 13:24:20.841778 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerName="sg-core" containerID="cri-o://1028fe3388a9a4800629f746dc8cc6ef4388fcb56369c6bc4824ef5217b29710" gracePeriod=30 Oct 13 13:24:20 crc kubenswrapper[4684]: I1013 13:24:20.841773 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerName="ceilometer-notification-agent" containerID="cri-o://450f6ebe4b5c9af4d3aad553ce3ec158c19a47a9309cfa643263a31e41cec14c" gracePeriod=30 Oct 13 13:24:20 crc kubenswrapper[4684]: I1013 13:24:20.841915 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerName="proxy-httpd" containerID="cri-o://ccccde0129d1114a5a34b024d288962322c52a06e855bda3f2891348857709d7" gracePeriod=30 Oct 13 13:24:20 crc kubenswrapper[4684]: I1013 13:24:20.867177 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.867160299 podStartE2EDuration="3.867160299s" podCreationTimestamp="2025-10-13 13:24:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 
13:24:20.859251127 +0000 UTC m=+1015.426635197" watchObservedRunningTime="2025-10-13 13:24:20.867160299 +0000 UTC m=+1015.434544369" Oct 13 13:24:20 crc kubenswrapper[4684]: I1013 13:24:20.887932 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.515079879 podStartE2EDuration="4.887895381s" podCreationTimestamp="2025-10-13 13:24:16 +0000 UTC" firstStartedPulling="2025-10-13 13:24:16.916921638 +0000 UTC m=+1011.484305708" lastFinishedPulling="2025-10-13 13:24:20.28973714 +0000 UTC m=+1014.857121210" observedRunningTime="2025-10-13 13:24:20.882780968 +0000 UTC m=+1015.450165048" watchObservedRunningTime="2025-10-13 13:24:20.887895381 +0000 UTC m=+1015.455279451" Oct 13 13:24:21 crc kubenswrapper[4684]: I1013 13:24:21.852680 4684 generic.go:334] "Generic (PLEG): container finished" podID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerID="ccccde0129d1114a5a34b024d288962322c52a06e855bda3f2891348857709d7" exitCode=0 Oct 13 13:24:21 crc kubenswrapper[4684]: I1013 13:24:21.853032 4684 generic.go:334] "Generic (PLEG): container finished" podID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerID="1028fe3388a9a4800629f746dc8cc6ef4388fcb56369c6bc4824ef5217b29710" exitCode=2 Oct 13 13:24:21 crc kubenswrapper[4684]: I1013 13:24:21.853049 4684 generic.go:334] "Generic (PLEG): container finished" podID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerID="450f6ebe4b5c9af4d3aad553ce3ec158c19a47a9309cfa643263a31e41cec14c" exitCode=0 Oct 13 13:24:21 crc kubenswrapper[4684]: I1013 13:24:21.852716 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28d8aeac-fcad-4ea8-955a-b7bf68921f86","Type":"ContainerDied","Data":"ccccde0129d1114a5a34b024d288962322c52a06e855bda3f2891348857709d7"} Oct 13 13:24:21 crc kubenswrapper[4684]: I1013 13:24:21.853183 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28d8aeac-fcad-4ea8-955a-b7bf68921f86","Type":"ContainerDied","Data":"1028fe3388a9a4800629f746dc8cc6ef4388fcb56369c6bc4824ef5217b29710"} Oct 13 13:24:21 crc kubenswrapper[4684]: I1013 13:24:21.853217 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28d8aeac-fcad-4ea8-955a-b7bf68921f86","Type":"ContainerDied","Data":"450f6ebe4b5c9af4d3aad553ce3ec158c19a47a9309cfa643263a31e41cec14c"} Oct 13 13:24:26 crc kubenswrapper[4684]: I1013 13:24:26.895149 4684 generic.go:334] "Generic (PLEG): container finished" podID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerID="dc5ba28514cce5c78b633c0abdecf34948795ce95a78f5bb929b98e172756f8a" exitCode=0 Oct 13 13:24:26 crc kubenswrapper[4684]: I1013 13:24:26.895450 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28d8aeac-fcad-4ea8-955a-b7bf68921f86","Type":"ContainerDied","Data":"dc5ba28514cce5c78b633c0abdecf34948795ce95a78f5bb929b98e172756f8a"} Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.100682 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.158712 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28d8aeac-fcad-4ea8-955a-b7bf68921f86-log-httpd\") pod \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.158764 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-sg-core-conf-yaml\") pod \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.159081 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-combined-ca-bundle\") pod \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.159227 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28d8aeac-fcad-4ea8-955a-b7bf68921f86-run-httpd\") pod \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.159288 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kg9h8\" (UniqueName: \"kubernetes.io/projected/28d8aeac-fcad-4ea8-955a-b7bf68921f86-kube-api-access-kg9h8\") pod \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.159335 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-scripts\") pod \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.159398 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-config-data\") pod \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\" (UID: \"28d8aeac-fcad-4ea8-955a-b7bf68921f86\") " Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.160794 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28d8aeac-fcad-4ea8-955a-b7bf68921f86-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "28d8aeac-fcad-4ea8-955a-b7bf68921f86" (UID: "28d8aeac-fcad-4ea8-955a-b7bf68921f86"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.161020 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28d8aeac-fcad-4ea8-955a-b7bf68921f86-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "28d8aeac-fcad-4ea8-955a-b7bf68921f86" (UID: "28d8aeac-fcad-4ea8-955a-b7bf68921f86"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.165420 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-scripts" (OuterVolumeSpecName: "scripts") pod "28d8aeac-fcad-4ea8-955a-b7bf68921f86" (UID: "28d8aeac-fcad-4ea8-955a-b7bf68921f86"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.166206 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28d8aeac-fcad-4ea8-955a-b7bf68921f86-kube-api-access-kg9h8" (OuterVolumeSpecName: "kube-api-access-kg9h8") pod "28d8aeac-fcad-4ea8-955a-b7bf68921f86" (UID: "28d8aeac-fcad-4ea8-955a-b7bf68921f86"). InnerVolumeSpecName "kube-api-access-kg9h8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.172545 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.172602 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.189506 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "28d8aeac-fcad-4ea8-955a-b7bf68921f86" (UID: "28d8aeac-fcad-4ea8-955a-b7bf68921f86"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.212825 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.219635 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.245619 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "28d8aeac-fcad-4ea8-955a-b7bf68921f86" (UID: "28d8aeac-fcad-4ea8-955a-b7bf68921f86"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.261499 4684 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28d8aeac-fcad-4ea8-955a-b7bf68921f86-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.261530 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kg9h8\" (UniqueName: \"kubernetes.io/projected/28d8aeac-fcad-4ea8-955a-b7bf68921f86-kube-api-access-kg9h8\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.261540 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.261548 4684 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/28d8aeac-fcad-4ea8-955a-b7bf68921f86-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.261557 4684 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.261565 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.278314 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-config-data" (OuterVolumeSpecName: "config-data") pod "28d8aeac-fcad-4ea8-955a-b7bf68921f86" (UID: "28d8aeac-fcad-4ea8-955a-b7bf68921f86"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.363546 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28d8aeac-fcad-4ea8-955a-b7bf68921f86-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.433625 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-6hd9g"] Oct 13 13:24:27 crc kubenswrapper[4684]: E1013 13:24:27.433995 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerName="proxy-httpd" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.434010 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerName="proxy-httpd" Oct 13 13:24:27 crc kubenswrapper[4684]: E1013 13:24:27.434030 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerName="ceilometer-central-agent" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.434038 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerName="ceilometer-central-agent" Oct 13 13:24:27 crc kubenswrapper[4684]: E1013 13:24:27.434062 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerName="sg-core" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.434069 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerName="sg-core" Oct 13 13:24:27 crc kubenswrapper[4684]: E1013 13:24:27.434079 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerName="ceilometer-notification-agent" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.434085 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerName="ceilometer-notification-agent" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.434238 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerName="proxy-httpd" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.434251 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerName="sg-core" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.434269 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerName="ceilometer-central-agent" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.434290 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" containerName="ceilometer-notification-agent" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.434837 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-6hd9g" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.446008 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-6hd9g"] Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.537531 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-8tjn6"] Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.538853 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-8tjn6" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.545808 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-8tjn6"] Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.567319 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzl8q\" (UniqueName: \"kubernetes.io/projected/b5d8d6f8-445c-44fc-bf63-ffb2b676a661-kube-api-access-gzl8q\") pod \"nova-api-db-create-6hd9g\" (UID: \"b5d8d6f8-445c-44fc-bf63-ffb2b676a661\") " pod="openstack/nova-api-db-create-6hd9g" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.638607 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-c2k7b"] Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.639697 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-c2k7b" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.649848 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-c2k7b"] Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.670168 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndlm2\" (UniqueName: \"kubernetes.io/projected/e05a9c82-2148-4d4a-838d-3b4c98faff04-kube-api-access-ndlm2\") pod \"nova-cell0-db-create-8tjn6\" (UID: \"e05a9c82-2148-4d4a-838d-3b4c98faff04\") " pod="openstack/nova-cell0-db-create-8tjn6" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.670235 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzl8q\" (UniqueName: \"kubernetes.io/projected/b5d8d6f8-445c-44fc-bf63-ffb2b676a661-kube-api-access-gzl8q\") pod \"nova-api-db-create-6hd9g\" (UID: \"b5d8d6f8-445c-44fc-bf63-ffb2b676a661\") " pod="openstack/nova-api-db-create-6hd9g" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.670288 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qt5f\" (UniqueName: \"kubernetes.io/projected/294e80af-0da4-4813-967e-9b972e56a5e2-kube-api-access-4qt5f\") pod \"nova-cell1-db-create-c2k7b\" (UID: \"294e80af-0da4-4813-967e-9b972e56a5e2\") " pod="openstack/nova-cell1-db-create-c2k7b" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.686372 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzl8q\" (UniqueName: \"kubernetes.io/projected/b5d8d6f8-445c-44fc-bf63-ffb2b676a661-kube-api-access-gzl8q\") pod \"nova-api-db-create-6hd9g\" (UID: \"b5d8d6f8-445c-44fc-bf63-ffb2b676a661\") " pod="openstack/nova-api-db-create-6hd9g" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.750015 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-6hd9g" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.771932 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndlm2\" (UniqueName: \"kubernetes.io/projected/e05a9c82-2148-4d4a-838d-3b4c98faff04-kube-api-access-ndlm2\") pod \"nova-cell0-db-create-8tjn6\" (UID: \"e05a9c82-2148-4d4a-838d-3b4c98faff04\") " pod="openstack/nova-cell0-db-create-8tjn6" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.772036 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qt5f\" (UniqueName: \"kubernetes.io/projected/294e80af-0da4-4813-967e-9b972e56a5e2-kube-api-access-4qt5f\") pod \"nova-cell1-db-create-c2k7b\" (UID: \"294e80af-0da4-4813-967e-9b972e56a5e2\") " pod="openstack/nova-cell1-db-create-c2k7b" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.792359 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qt5f\" (UniqueName: \"kubernetes.io/projected/294e80af-0da4-4813-967e-9b972e56a5e2-kube-api-access-4qt5f\") pod \"nova-cell1-db-create-c2k7b\" (UID: \"294e80af-0da4-4813-967e-9b972e56a5e2\") " pod="openstack/nova-cell1-db-create-c2k7b" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.792612 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndlm2\" (UniqueName: \"kubernetes.io/projected/e05a9c82-2148-4d4a-838d-3b4c98faff04-kube-api-access-ndlm2\") pod \"nova-cell0-db-create-8tjn6\" (UID: \"e05a9c82-2148-4d4a-838d-3b4c98faff04\") " pod="openstack/nova-cell0-db-create-8tjn6" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.856465 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-8tjn6" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.916519 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"28d8aeac-fcad-4ea8-955a-b7bf68921f86","Type":"ContainerDied","Data":"20d26a9c47a4b6f56e9500fd68a65c7c71d00da993afe5cf10b68f6ed93b8259"} Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.916563 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.916584 4684 scope.go:117] "RemoveContainer" containerID="ccccde0129d1114a5a34b024d288962322c52a06e855bda3f2891348857709d7" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.916718 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.918263 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.957762 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-c2k7b" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.963424 4684 scope.go:117] "RemoveContainer" containerID="1028fe3388a9a4800629f746dc8cc6ef4388fcb56369c6bc4824ef5217b29710" Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.976837 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:24:27 crc kubenswrapper[4684]: I1013 13:24:27.986550 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.015518 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.033758 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.039430 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.039671 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.040152 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.097208 4684 scope.go:117] "RemoveContainer" containerID="450f6ebe4b5c9af4d3aad553ce3ec158c19a47a9309cfa643263a31e41cec14c" Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.144661 4684 scope.go:117] "RemoveContainer" containerID="dc5ba28514cce5c78b633c0abdecf34948795ce95a78f5bb929b98e172756f8a" Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.189963 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-config-data\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0" Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.190057 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-scripts\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0" Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.190404 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0" Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.190765 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdc49f07-01da-4e40-8ac2-482a0e88fc86-run-httpd\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0" Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.190796 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9bkz\" (UniqueName: \"kubernetes.io/projected/bdc49f07-01da-4e40-8ac2-482a0e88fc86-kube-api-access-l9bkz\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0" 
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.190914 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.190949 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdc49f07-01da-4e40-8ac2-482a0e88fc86-log-httpd\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.202463 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.203371 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.236179 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.276288 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.285323 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-6hd9g"]
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.293327 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-config-data\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.293411 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-scripts\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.293447 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.293523 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdc49f07-01da-4e40-8ac2-482a0e88fc86-run-httpd\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.293543 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9bkz\" (UniqueName: \"kubernetes.io/projected/bdc49f07-01da-4e40-8ac2-482a0e88fc86-kube-api-access-l9bkz\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.293614 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.293638 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdc49f07-01da-4e40-8ac2-482a0e88fc86-log-httpd\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.294172 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdc49f07-01da-4e40-8ac2-482a0e88fc86-log-httpd\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.294412 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdc49f07-01da-4e40-8ac2-482a0e88fc86-run-httpd\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.301911 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.305566 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-config-data\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.310876 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-scripts\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.320619 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9bkz\" (UniqueName: \"kubernetes.io/projected/bdc49f07-01da-4e40-8ac2-482a0e88fc86-kube-api-access-l9bkz\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.321925 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") " pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.369327 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28d8aeac-fcad-4ea8-955a-b7bf68921f86" path="/var/lib/kubelet/pods/28d8aeac-fcad-4ea8-955a-b7bf68921f86/volumes"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.426568 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.449788 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-8tjn6"]
Oct 13 13:24:28 crc kubenswrapper[4684]: W1013 13:24:28.487309 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode05a9c82_2148_4d4a_838d_3b4c98faff04.slice/crio-a4c0521a50d566e22c4ce19deb4c12f9b599c78eb5bd7d5202629670b5ffbbd5 WatchSource:0}: Error finding container a4c0521a50d566e22c4ce19deb4c12f9b599c78eb5bd7d5202629670b5ffbbd5: Status 404 returned error can't find the container with id a4c0521a50d566e22c4ce19deb4c12f9b599c78eb5bd7d5202629670b5ffbbd5
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.557392 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-c2k7b"]
Oct 13 13:24:28 crc kubenswrapper[4684]: W1013 13:24:28.566530 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod294e80af_0da4_4813_967e_9b972e56a5e2.slice/crio-a83b1cd1a92a0172445e8bdacd46e21dd86f8e18345ba1261349e097aa77cf64 WatchSource:0}: Error finding container a83b1cd1a92a0172445e8bdacd46e21dd86f8e18345ba1261349e097aa77cf64: Status 404 returned error can't find the container with id a83b1cd1a92a0172445e8bdacd46e21dd86f8e18345ba1261349e097aa77cf64
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.923644 4684 generic.go:334] "Generic (PLEG): container finished" podID="b5d8d6f8-445c-44fc-bf63-ffb2b676a661" containerID="4e166dc4fd3f0fa6b6f665a1a176ba438a307d5a6e4a3c353272e4b4a790284e" exitCode=0
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.923690 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-6hd9g" event={"ID":"b5d8d6f8-445c-44fc-bf63-ffb2b676a661","Type":"ContainerDied","Data":"4e166dc4fd3f0fa6b6f665a1a176ba438a307d5a6e4a3c353272e4b4a790284e"}
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.923941 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-6hd9g" event={"ID":"b5d8d6f8-445c-44fc-bf63-ffb2b676a661","Type":"ContainerStarted","Data":"9ab4837161326bf5114bb9c6a712cdecab1c9e64b23b596185ebf2372400ec75"}
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.940159 4684 generic.go:334] "Generic (PLEG): container finished" podID="e05a9c82-2148-4d4a-838d-3b4c98faff04" containerID="6d4677711d353c7285a580a77e5224b723257536c9d43c00b82286a3b4705ad9" exitCode=0
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.940444 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-8tjn6" event={"ID":"e05a9c82-2148-4d4a-838d-3b4c98faff04","Type":"ContainerDied","Data":"6d4677711d353c7285a580a77e5224b723257536c9d43c00b82286a3b4705ad9"}
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.940583 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-8tjn6" event={"ID":"e05a9c82-2148-4d4a-838d-3b4c98faff04","Type":"ContainerStarted","Data":"a4c0521a50d566e22c4ce19deb4c12f9b599c78eb5bd7d5202629670b5ffbbd5"}
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.948585 4684 generic.go:334] "Generic (PLEG): container finished" podID="294e80af-0da4-4813-967e-9b972e56a5e2" containerID="79a56616c82199d8db3ddc0e9c6c9f8f3624c72ddc74c92e668b484a0e099cc7" exitCode=0
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.949728 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-c2k7b" event={"ID":"294e80af-0da4-4813-967e-9b972e56a5e2","Type":"ContainerDied","Data":"79a56616c82199d8db3ddc0e9c6c9f8f3624c72ddc74c92e668b484a0e099cc7"}
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.949796 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-c2k7b" event={"ID":"294e80af-0da4-4813-967e-9b972e56a5e2","Type":"ContainerStarted","Data":"a83b1cd1a92a0172445e8bdacd46e21dd86f8e18345ba1261349e097aa77cf64"}
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.950136 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.950376 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Oct 13 13:24:28 crc kubenswrapper[4684]: I1013 13:24:28.998207 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 13 13:24:29 crc kubenswrapper[4684]: I1013 13:24:29.971161 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 13 13:24:29 crc kubenswrapper[4684]: I1013 13:24:29.977011 4684 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 13 13:24:29 crc kubenswrapper[4684]: I1013 13:24:29.977031 4684 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 13 13:24:29 crc kubenswrapper[4684]: I1013 13:24:29.976999 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdc49f07-01da-4e40-8ac2-482a0e88fc86","Type":"ContainerStarted","Data":"e89dd3dca2ee0ff89c935c41dbc44de23fc958f0030e60f4f04c5e06b5673ec7"}
Oct 13 13:24:29 crc kubenswrapper[4684]: I1013 13:24:29.977073 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdc49f07-01da-4e40-8ac2-482a0e88fc86","Type":"ContainerStarted","Data":"d94af620dd4c2e982e8b35579a6cf6dae8484b56f91e58948b00833561c7ac92"}
Oct 13 13:24:29 crc kubenswrapper[4684]: I1013 13:24:29.977087 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdc49f07-01da-4e40-8ac2-482a0e88fc86","Type":"ContainerStarted","Data":"46ec19a9f94cf32344bc192c53ed10382ed51a06d0c8ceef2441a2031703ec89"}
Oct 13 13:24:30 crc kubenswrapper[4684]: I1013 13:24:30.416389 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 13 13:24:30 crc kubenswrapper[4684]: I1013 13:24:30.416728 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 13 13:24:30 crc kubenswrapper[4684]: I1013 13:24:30.552545 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-6hd9g"
Oct 13 13:24:30 crc kubenswrapper[4684]: I1013 13:24:30.562555 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-8tjn6"
Oct 13 13:24:30 crc kubenswrapper[4684]: I1013 13:24:30.590116 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-c2k7b"
Oct 13 13:24:30 crc kubenswrapper[4684]: I1013 13:24:30.652557 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzl8q\" (UniqueName: \"kubernetes.io/projected/b5d8d6f8-445c-44fc-bf63-ffb2b676a661-kube-api-access-gzl8q\") pod \"b5d8d6f8-445c-44fc-bf63-ffb2b676a661\" (UID: \"b5d8d6f8-445c-44fc-bf63-ffb2b676a661\") "
Oct 13 13:24:30 crc kubenswrapper[4684]: I1013 13:24:30.653870 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndlm2\" (UniqueName: \"kubernetes.io/projected/e05a9c82-2148-4d4a-838d-3b4c98faff04-kube-api-access-ndlm2\") pod \"e05a9c82-2148-4d4a-838d-3b4c98faff04\" (UID: \"e05a9c82-2148-4d4a-838d-3b4c98faff04\") "
Oct 13 13:24:30 crc kubenswrapper[4684]: I1013 13:24:30.658673 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e05a9c82-2148-4d4a-838d-3b4c98faff04-kube-api-access-ndlm2" (OuterVolumeSpecName: "kube-api-access-ndlm2") pod "e05a9c82-2148-4d4a-838d-3b4c98faff04" (UID: "e05a9c82-2148-4d4a-838d-3b4c98faff04"). InnerVolumeSpecName "kube-api-access-ndlm2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:24:30 crc kubenswrapper[4684]: I1013 13:24:30.660108 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5d8d6f8-445c-44fc-bf63-ffb2b676a661-kube-api-access-gzl8q" (OuterVolumeSpecName: "kube-api-access-gzl8q") pod "b5d8d6f8-445c-44fc-bf63-ffb2b676a661" (UID: "b5d8d6f8-445c-44fc-bf63-ffb2b676a661"). InnerVolumeSpecName "kube-api-access-gzl8q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:24:30 crc kubenswrapper[4684]: I1013 13:24:30.755220 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qt5f\" (UniqueName: \"kubernetes.io/projected/294e80af-0da4-4813-967e-9b972e56a5e2-kube-api-access-4qt5f\") pod \"294e80af-0da4-4813-967e-9b972e56a5e2\" (UID: \"294e80af-0da4-4813-967e-9b972e56a5e2\") "
Oct 13 13:24:30 crc kubenswrapper[4684]: I1013 13:24:30.755618 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndlm2\" (UniqueName: \"kubernetes.io/projected/e05a9c82-2148-4d4a-838d-3b4c98faff04-kube-api-access-ndlm2\") on node \"crc\" DevicePath \"\""
Oct 13 13:24:30 crc kubenswrapper[4684]: I1013 13:24:30.755634 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzl8q\" (UniqueName: \"kubernetes.io/projected/b5d8d6f8-445c-44fc-bf63-ffb2b676a661-kube-api-access-gzl8q\") on node \"crc\" DevicePath \"\""
Oct 13 13:24:30 crc kubenswrapper[4684]: I1013 13:24:30.761129 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/294e80af-0da4-4813-967e-9b972e56a5e2-kube-api-access-4qt5f" (OuterVolumeSpecName: "kube-api-access-4qt5f") pod "294e80af-0da4-4813-967e-9b972e56a5e2" (UID: "294e80af-0da4-4813-967e-9b972e56a5e2"). InnerVolumeSpecName "kube-api-access-4qt5f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:24:34 crc kubenswrapper[4684]: I1013 13:24:34.039770 4684 generic.go:334] "Generic (PLEG): container finished" podID="bdc49f07-01da-4e40-8ac2-482a0e88fc86" containerID="e89dd3dca2ee0ff89c935c41dbc44de23fc958f0030e60f4f04c5e06b5673ec7" exitCode=0
Oct 13 13:24:34 crc kubenswrapper[4684]: I1013 13:24:34.039792 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdc49f07-01da-4e40-8ac2-482a0e88fc86","Type":"ContainerDied","Data":"a22cec39368e733bd9775b3b61ecaddec97abc4ea9a58d510e7c9268cf85dcb0"}
Oct 13 13:24:34 crc kubenswrapper[4684]: I1013 13:24:34.039820 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdc49f07-01da-4e40-8ac2-482a0e88fc86","Type":"ContainerDied","Data":"cddb9311ba729274c718faa9f3e76de4df9eb0f39aecd09dbb97fd61984b83c3"}
Oct 13 13:24:34 crc kubenswrapper[4684]: I1013 13:24:34.039836 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdc49f07-01da-4e40-8ac2-482a0e88fc86","Type":"ContainerDied","Data":"e89dd3dca2ee0ff89c935c41dbc44de23fc958f0030e60f4f04c5e06b5673ec7"}
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.498225 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.620749 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdc49f07-01da-4e40-8ac2-482a0e88fc86-run-httpd\") pod \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") "
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.620811 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdc49f07-01da-4e40-8ac2-482a0e88fc86-log-httpd\") pod \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") "
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.621017 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9bkz\" (UniqueName: \"kubernetes.io/projected/bdc49f07-01da-4e40-8ac2-482a0e88fc86-kube-api-access-l9bkz\") pod \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") "
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.621082 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-sg-core-conf-yaml\") pod \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") "
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.621110 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-scripts\") pod \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") "
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.621184 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-config-data\") pod \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") "
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.621229 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-combined-ca-bundle\") pod \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\" (UID: \"bdc49f07-01da-4e40-8ac2-482a0e88fc86\") "
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.621498 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bdc49f07-01da-4e40-8ac2-482a0e88fc86-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "bdc49f07-01da-4e40-8ac2-482a0e88fc86" (UID: "bdc49f07-01da-4e40-8ac2-482a0e88fc86"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.621873 4684 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdc49f07-01da-4e40-8ac2-482a0e88fc86-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.622498 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bdc49f07-01da-4e40-8ac2-482a0e88fc86-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "bdc49f07-01da-4e40-8ac2-482a0e88fc86" (UID: "bdc49f07-01da-4e40-8ac2-482a0e88fc86"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.631609 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-scripts" (OuterVolumeSpecName: "scripts") pod "bdc49f07-01da-4e40-8ac2-482a0e88fc86" (UID: "bdc49f07-01da-4e40-8ac2-482a0e88fc86"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.631661 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdc49f07-01da-4e40-8ac2-482a0e88fc86-kube-api-access-l9bkz" (OuterVolumeSpecName: "kube-api-access-l9bkz") pod "bdc49f07-01da-4e40-8ac2-482a0e88fc86" (UID: "bdc49f07-01da-4e40-8ac2-482a0e88fc86"). InnerVolumeSpecName "kube-api-access-l9bkz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.659312 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "bdc49f07-01da-4e40-8ac2-482a0e88fc86" (UID: "bdc49f07-01da-4e40-8ac2-482a0e88fc86"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.673184 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-dd65-account-create-c2w2g"]
Oct 13 13:24:37 crc kubenswrapper[4684]: E1013 13:24:37.673632 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5d8d6f8-445c-44fc-bf63-ffb2b676a661" containerName="mariadb-database-create"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.673655 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5d8d6f8-445c-44fc-bf63-ffb2b676a661" containerName="mariadb-database-create"
Oct 13 13:24:37 crc kubenswrapper[4684]: E1013 13:24:37.673670 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdc49f07-01da-4e40-8ac2-482a0e88fc86" containerName="ceilometer-central-agent"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.673677 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdc49f07-01da-4e40-8ac2-482a0e88fc86" containerName="ceilometer-central-agent"
Oct 13 13:24:37 crc kubenswrapper[4684]: E1013 13:24:37.673696 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdc49f07-01da-4e40-8ac2-482a0e88fc86" containerName="proxy-httpd"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.673703 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdc49f07-01da-4e40-8ac2-482a0e88fc86" containerName="proxy-httpd"
Oct 13 13:24:37 crc kubenswrapper[4684]: E1013 13:24:37.673722 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdc49f07-01da-4e40-8ac2-482a0e88fc86" containerName="ceilometer-notification-agent"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.673729 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdc49f07-01da-4e40-8ac2-482a0e88fc86" containerName="ceilometer-notification-agent"
Oct 13 13:24:37 crc kubenswrapper[4684]: E1013 13:24:37.673740 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="294e80af-0da4-4813-967e-9b972e56a5e2" containerName="mariadb-database-create"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.673747 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="294e80af-0da4-4813-967e-9b972e56a5e2" containerName="mariadb-database-create"
Oct 13 13:24:37 crc kubenswrapper[4684]: E1013 13:24:37.673766 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e05a9c82-2148-4d4a-838d-3b4c98faff04" containerName="mariadb-database-create"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.673774 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="e05a9c82-2148-4d4a-838d-3b4c98faff04" containerName="mariadb-database-create"
Oct 13 13:24:37 crc kubenswrapper[4684]: E1013 13:24:37.673801 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdc49f07-01da-4e40-8ac2-482a0e88fc86" containerName="sg-core"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.673808 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdc49f07-01da-4e40-8ac2-482a0e88fc86" containerName="sg-core"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.674037 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdc49f07-01da-4e40-8ac2-482a0e88fc86" containerName="sg-core"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.674058 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="e05a9c82-2148-4d4a-838d-3b4c98faff04" containerName="mariadb-database-create"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.674076 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5d8d6f8-445c-44fc-bf63-ffb2b676a661" containerName="mariadb-database-create"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.674086 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdc49f07-01da-4e40-8ac2-482a0e88fc86" containerName="ceilometer-central-agent"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.674101 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdc49f07-01da-4e40-8ac2-482a0e88fc86" containerName="ceilometer-notification-agent"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.674118 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdc49f07-01da-4e40-8ac2-482a0e88fc86" containerName="proxy-httpd"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.674131 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="294e80af-0da4-4813-967e-9b972e56a5e2" containerName="mariadb-database-create"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.674824 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-dd65-account-create-c2w2g"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.677237 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.681267 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-dd65-account-create-c2w2g"]
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.723094 4684 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdc49f07-01da-4e40-8ac2-482a0e88fc86-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.723135 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9bkz\" (UniqueName: \"kubernetes.io/projected/bdc49f07-01da-4e40-8ac2-482a0e88fc86-kube-api-access-l9bkz\") on node \"crc\" DevicePath \"\""
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.723150 4684 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.723162 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-scripts\") on node \"crc\" DevicePath \"\""
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.727643 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bdc49f07-01da-4e40-8ac2-482a0e88fc86" (UID: "bdc49f07-01da-4e40-8ac2-482a0e88fc86"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.762107 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-config-data" (OuterVolumeSpecName: "config-data") pod "bdc49f07-01da-4e40-8ac2-482a0e88fc86" (UID: "bdc49f07-01da-4e40-8ac2-482a0e88fc86"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.824882 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l572w\" (UniqueName: \"kubernetes.io/projected/6b2d5063-943d-4cea-b1cd-fc841e23d897-kube-api-access-l572w\") pod \"nova-api-dd65-account-create-c2w2g\" (UID: \"6b2d5063-943d-4cea-b1cd-fc841e23d897\") " pod="openstack/nova-api-dd65-account-create-c2w2g"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.825065 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.825084 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdc49f07-01da-4e40-8ac2-482a0e88fc86-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.871953 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-608c-account-create-p4w5f"]
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.873088 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-608c-account-create-p4w5f"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.875556 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.882266 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-608c-account-create-p4w5f"]
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.926683 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l572w\" (UniqueName: \"kubernetes.io/projected/6b2d5063-943d-4cea-b1cd-fc841e23d897-kube-api-access-l572w\") pod \"nova-api-dd65-account-create-c2w2g\" (UID: \"6b2d5063-943d-4cea-b1cd-fc841e23d897\") " pod="openstack/nova-api-dd65-account-create-c2w2g"
Oct 13 13:24:37 crc kubenswrapper[4684]: I1013 13:24:37.942075 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l572w\" (UniqueName: \"kubernetes.io/projected/6b2d5063-943d-4cea-b1cd-fc841e23d897-kube-api-access-l572w\") pod \"nova-api-dd65-account-create-c2w2g\" (UID: \"6b2d5063-943d-4cea-b1cd-fc841e23d897\") " pod="openstack/nova-api-dd65-account-create-c2w2g"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.028417 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77662\" (UniqueName: \"kubernetes.io/projected/75fd1cdd-9db0-47d5-be3b-874bea0755b8-kube-api-access-77662\") pod \"nova-cell0-608c-account-create-p4w5f\" (UID: \"75fd1cdd-9db0-47d5-be3b-874bea0755b8\") " pod="openstack/nova-cell0-608c-account-create-p4w5f"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.055840 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-dd65-account-create-c2w2g"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.088098 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-0354-account-create-995tj"]
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.090285 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-0354-account-create-995tj"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.093038 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.106046 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-0354-account-create-995tj"]
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.124371 4684 generic.go:334] "Generic (PLEG): container finished" podID="bdc49f07-01da-4e40-8ac2-482a0e88fc86" containerID="d94af620dd4c2e982e8b35579a6cf6dae8484b56f91e58948b00833561c7ac92" exitCode=0
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.124441 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdc49f07-01da-4e40-8ac2-482a0e88fc86","Type":"ContainerDied","Data":"d94af620dd4c2e982e8b35579a6cf6dae8484b56f91e58948b00833561c7ac92"}
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.124484 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdc49f07-01da-4e40-8ac2-482a0e88fc86","Type":"ContainerDied","Data":"46ec19a9f94cf32344bc192c53ed10382ed51a06d0c8ceef2441a2031703ec89"}
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.124509 4684 scope.go:117] "RemoveContainer" containerID="a22cec39368e733bd9775b3b61ecaddec97abc4ea9a58d510e7c9268cf85dcb0"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.124735 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.130451 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77662\" (UniqueName: \"kubernetes.io/projected/75fd1cdd-9db0-47d5-be3b-874bea0755b8-kube-api-access-77662\") pod \"nova-cell0-608c-account-create-p4w5f\" (UID: \"75fd1cdd-9db0-47d5-be3b-874bea0755b8\") " pod="openstack/nova-cell0-608c-account-create-p4w5f"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.159016 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77662\" (UniqueName: \"kubernetes.io/projected/75fd1cdd-9db0-47d5-be3b-874bea0755b8-kube-api-access-77662\") pod \"nova-cell0-608c-account-create-p4w5f\" (UID: \"75fd1cdd-9db0-47d5-be3b-874bea0755b8\") " pod="openstack/nova-cell0-608c-account-create-p4w5f"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.159638 4684 scope.go:117] "RemoveContainer" containerID="cddb9311ba729274c718faa9f3e76de4df9eb0f39aecd09dbb97fd61984b83c3"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.170444 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.196769 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-608c-account-create-p4w5f"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.197346 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.204495 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.213594 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.220082 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.220299 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.225778 4684 scope.go:117] "RemoveContainer" containerID="e89dd3dca2ee0ff89c935c41dbc44de23fc958f0030e60f4f04c5e06b5673ec7"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.232140 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pftmw\" (UniqueName: \"kubernetes.io/projected/b3904d99-f791-4d1d-b518-c5e367fa4d39-kube-api-access-pftmw\") pod \"nova-cell1-0354-account-create-995tj\" (UID: \"b3904d99-f791-4d1d-b518-c5e367fa4d39\") " pod="openstack/nova-cell1-0354-account-create-995tj"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.296851 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.319742 4684 scope.go:117] "RemoveContainer" containerID="d94af620dd4c2e982e8b35579a6cf6dae8484b56f91e58948b00833561c7ac92"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.334478 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsdzr\" (UniqueName: \"kubernetes.io/projected/41330135-d29c-494e-93ec-d67c66cbaf1c-kube-api-access-zsdzr\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.334545 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41330135-d29c-494e-93ec-d67c66cbaf1c-run-httpd\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.334625 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41330135-d29c-494e-93ec-d67c66cbaf1c-log-httpd\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.334648 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.334706 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.334799 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pftmw\" (UniqueName: \"kubernetes.io/projected/b3904d99-f791-4d1d-b518-c5e367fa4d39-kube-api-access-pftmw\") pod \"nova-cell1-0354-account-create-995tj\" (UID: \"b3904d99-f791-4d1d-b518-c5e367fa4d39\") " pod="openstack/nova-cell1-0354-account-create-995tj"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.334927 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-scripts\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.334974 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-config-data\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.360767 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pftmw\" (UniqueName: \"kubernetes.io/projected/b3904d99-f791-4d1d-b518-c5e367fa4d39-kube-api-access-pftmw\") pod \"nova-cell1-0354-account-create-995tj\" (UID: \"b3904d99-f791-4d1d-b518-c5e367fa4d39\") " pod="openstack/nova-cell1-0354-account-create-995tj"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.366874 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdc49f07-01da-4e40-8ac2-482a0e88fc86" path="/var/lib/kubelet/pods/bdc49f07-01da-4e40-8ac2-482a0e88fc86/volumes"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.420845 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-0354-account-create-995tj"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.436835 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-scripts\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.436887 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-config-data\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.436950 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsdzr\" (UniqueName: \"kubernetes.io/projected/41330135-d29c-494e-93ec-d67c66cbaf1c-kube-api-access-zsdzr\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.436971 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41330135-d29c-494e-93ec-d67c66cbaf1c-run-httpd\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.437018 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41330135-d29c-494e-93ec-d67c66cbaf1c-log-httpd\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0"
Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.437033 4684 reconciler_common.go:218] "operationExecutor.MountVolume
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.437062 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.442455 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41330135-d29c-494e-93ec-d67c66cbaf1c-run-httpd\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.443164 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41330135-d29c-494e-93ec-d67c66cbaf1c-log-httpd\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.443402 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-scripts\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.445088 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.447202 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-config-data\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.447987 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.461728 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsdzr\" (UniqueName: \"kubernetes.io/projected/41330135-d29c-494e-93ec-d67c66cbaf1c-kube-api-access-zsdzr\") pod \"ceilometer-0\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " pod="openstack/ceilometer-0" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.472473 4684 scope.go:117] "RemoveContainer" containerID="a22cec39368e733bd9775b3b61ecaddec97abc4ea9a58d510e7c9268cf85dcb0" Oct 13 13:24:38 crc kubenswrapper[4684]: E1013 13:24:38.473050 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a22cec39368e733bd9775b3b61ecaddec97abc4ea9a58d510e7c9268cf85dcb0\": container with ID starting with a22cec39368e733bd9775b3b61ecaddec97abc4ea9a58d510e7c9268cf85dcb0 not 
found: ID does not exist" containerID="a22cec39368e733bd9775b3b61ecaddec97abc4ea9a58d510e7c9268cf85dcb0" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.473117 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a22cec39368e733bd9775b3b61ecaddec97abc4ea9a58d510e7c9268cf85dcb0"} err="failed to get container status \"a22cec39368e733bd9775b3b61ecaddec97abc4ea9a58d510e7c9268cf85dcb0\": rpc error: code = NotFound desc = could not find container \"a22cec39368e733bd9775b3b61ecaddec97abc4ea9a58d510e7c9268cf85dcb0\": container with ID starting with a22cec39368e733bd9775b3b61ecaddec97abc4ea9a58d510e7c9268cf85dcb0 not found: ID does not exist" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.473143 4684 scope.go:117] "RemoveContainer" containerID="cddb9311ba729274c718faa9f3e76de4df9eb0f39aecd09dbb97fd61984b83c3" Oct 13 13:24:38 crc kubenswrapper[4684]: E1013 13:24:38.473671 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cddb9311ba729274c718faa9f3e76de4df9eb0f39aecd09dbb97fd61984b83c3\": container with ID starting with cddb9311ba729274c718faa9f3e76de4df9eb0f39aecd09dbb97fd61984b83c3 not found: ID does not exist" containerID="cddb9311ba729274c718faa9f3e76de4df9eb0f39aecd09dbb97fd61984b83c3" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.473705 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cddb9311ba729274c718faa9f3e76de4df9eb0f39aecd09dbb97fd61984b83c3"} err="failed to get container status \"cddb9311ba729274c718faa9f3e76de4df9eb0f39aecd09dbb97fd61984b83c3\": rpc error: code = NotFound desc = could not find container \"cddb9311ba729274c718faa9f3e76de4df9eb0f39aecd09dbb97fd61984b83c3\": container with ID starting with cddb9311ba729274c718faa9f3e76de4df9eb0f39aecd09dbb97fd61984b83c3 not found: ID does not exist" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.473726 4684 scope.go:117] "RemoveContainer" containerID="e89dd3dca2ee0ff89c935c41dbc44de23fc958f0030e60f4f04c5e06b5673ec7" Oct 13 13:24:38 crc kubenswrapper[4684]: E1013 13:24:38.474005 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e89dd3dca2ee0ff89c935c41dbc44de23fc958f0030e60f4f04c5e06b5673ec7\": container with ID starting with e89dd3dca2ee0ff89c935c41dbc44de23fc958f0030e60f4f04c5e06b5673ec7 not found: ID does not exist" containerID="e89dd3dca2ee0ff89c935c41dbc44de23fc958f0030e60f4f04c5e06b5673ec7" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.474027 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e89dd3dca2ee0ff89c935c41dbc44de23fc958f0030e60f4f04c5e06b5673ec7"} err="failed to get container status \"e89dd3dca2ee0ff89c935c41dbc44de23fc958f0030e60f4f04c5e06b5673ec7\": rpc error: code = NotFound desc = could not find container \"e89dd3dca2ee0ff89c935c41dbc44de23fc958f0030e60f4f04c5e06b5673ec7\": container with ID starting with e89dd3dca2ee0ff89c935c41dbc44de23fc958f0030e60f4f04c5e06b5673ec7 not found: ID does not exist" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.474043 4684 scope.go:117] "RemoveContainer" containerID="d94af620dd4c2e982e8b35579a6cf6dae8484b56f91e58948b00833561c7ac92" Oct 13 13:24:38 crc kubenswrapper[4684]: E1013 13:24:38.474268 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"d94af620dd4c2e982e8b35579a6cf6dae8484b56f91e58948b00833561c7ac92\": container with ID starting with d94af620dd4c2e982e8b35579a6cf6dae8484b56f91e58948b00833561c7ac92 not found: ID does not exist" containerID="d94af620dd4c2e982e8b35579a6cf6dae8484b56f91e58948b00833561c7ac92" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.474296 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d94af620dd4c2e982e8b35579a6cf6dae8484b56f91e58948b00833561c7ac92"} err="failed to get container status \"d94af620dd4c2e982e8b35579a6cf6dae8484b56f91e58948b00833561c7ac92\": rpc error: code = NotFound desc = could not find container \"d94af620dd4c2e982e8b35579a6cf6dae8484b56f91e58948b00833561c7ac92\": container with ID starting with d94af620dd4c2e982e8b35579a6cf6dae8484b56f91e58948b00833561c7ac92 not found: ID does not exist" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.523060 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.580449 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-dd65-account-create-c2w2g"] Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.745879 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-608c-account-create-p4w5f"] Oct 13 13:24:38 crc kubenswrapper[4684]: W1013 13:24:38.753267 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75fd1cdd_9db0_47d5_be3b_874bea0755b8.slice/crio-5fa440101fd860ec518c690af0998c8e442d7536ed27598d11c3762d8c7a8b09 WatchSource:0}: Error finding container 5fa440101fd860ec518c690af0998c8e442d7536ed27598d11c3762d8c7a8b09: Status 404 returned error can't find the container with id 5fa440101fd860ec518c690af0998c8e442d7536ed27598d11c3762d8c7a8b09 Oct 13 13:24:38 crc kubenswrapper[4684]: I1013 13:24:38.916963 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-0354-account-create-995tj"] Oct 13 13:24:38 crc kubenswrapper[4684]: W1013 13:24:38.921263 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3904d99_f791_4d1d_b518_c5e367fa4d39.slice/crio-2fc011352efab8210e0ec552d7d8759611ef8f78962943ae852805502eceab26 WatchSource:0}: Error finding container 2fc011352efab8210e0ec552d7d8759611ef8f78962943ae852805502eceab26: Status 404 returned error can't find the container with id 2fc011352efab8210e0ec552d7d8759611ef8f78962943ae852805502eceab26 Oct 13 13:24:39 crc kubenswrapper[4684]: I1013 13:24:39.030301 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:24:39 crc kubenswrapper[4684]: W1013 13:24:39.072733 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41330135_d29c_494e_93ec_d67c66cbaf1c.slice/crio-be40bd3a518b33206287c1c3f1b4e269c1289c32a8f23f6f4c166c7287802428 WatchSource:0}: Error finding container be40bd3a518b33206287c1c3f1b4e269c1289c32a8f23f6f4c166c7287802428: Status 404 returned error can't find the container with id be40bd3a518b33206287c1c3f1b4e269c1289c32a8f23f6f4c166c7287802428 Oct 13 13:24:39 crc kubenswrapper[4684]: I1013 13:24:39.141863 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-0354-account-create-995tj" 
event={"ID":"b3904d99-f791-4d1d-b518-c5e367fa4d39","Type":"ContainerStarted","Data":"86d228b77a0ec0e333bcf0f082841ca4b49ea3c25006705656853854e4a167d0"} Oct 13 13:24:39 crc kubenswrapper[4684]: I1013 13:24:39.141976 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-0354-account-create-995tj" event={"ID":"b3904d99-f791-4d1d-b518-c5e367fa4d39","Type":"ContainerStarted","Data":"2fc011352efab8210e0ec552d7d8759611ef8f78962943ae852805502eceab26"} Oct 13 13:24:39 crc kubenswrapper[4684]: I1013 13:24:39.144676 4684 generic.go:334] "Generic (PLEG): container finished" podID="6b2d5063-943d-4cea-b1cd-fc841e23d897" containerID="baf6392375a2875bf926e1581663f0db5c20a74bd07a88dfa31f292d9ff8d64a" exitCode=0 Oct 13 13:24:39 crc kubenswrapper[4684]: I1013 13:24:39.144737 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-dd65-account-create-c2w2g" event={"ID":"6b2d5063-943d-4cea-b1cd-fc841e23d897","Type":"ContainerDied","Data":"baf6392375a2875bf926e1581663f0db5c20a74bd07a88dfa31f292d9ff8d64a"} Oct 13 13:24:39 crc kubenswrapper[4684]: I1013 13:24:39.144764 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-dd65-account-create-c2w2g" event={"ID":"6b2d5063-943d-4cea-b1cd-fc841e23d897","Type":"ContainerStarted","Data":"4f19c10e2bcad31fdddbe4e7967d0b9766ee6f58909e0761ca19364fa2cae8fa"} Oct 13 13:24:39 crc kubenswrapper[4684]: I1013 13:24:39.148220 4684 generic.go:334] "Generic (PLEG): container finished" podID="75fd1cdd-9db0-47d5-be3b-874bea0755b8" containerID="3e064b130b66e8b0bb2e506a5e2f540aae5436596f9e030fb8b2a22edde25736" exitCode=0 Oct 13 13:24:39 crc kubenswrapper[4684]: I1013 13:24:39.148352 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-608c-account-create-p4w5f" event={"ID":"75fd1cdd-9db0-47d5-be3b-874bea0755b8","Type":"ContainerDied","Data":"3e064b130b66e8b0bb2e506a5e2f540aae5436596f9e030fb8b2a22edde25736"} Oct 13 13:24:39 crc kubenswrapper[4684]: I1013 13:24:39.148389 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-608c-account-create-p4w5f" event={"ID":"75fd1cdd-9db0-47d5-be3b-874bea0755b8","Type":"ContainerStarted","Data":"5fa440101fd860ec518c690af0998c8e442d7536ed27598d11c3762d8c7a8b09"} Oct 13 13:24:39 crc kubenswrapper[4684]: I1013 13:24:39.151756 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41330135-d29c-494e-93ec-d67c66cbaf1c","Type":"ContainerStarted","Data":"be40bd3a518b33206287c1c3f1b4e269c1289c32a8f23f6f4c166c7287802428"} Oct 13 13:24:39 crc kubenswrapper[4684]: I1013 13:24:39.161130 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-0354-account-create-995tj" podStartSLOduration=1.161084016 podStartE2EDuration="1.161084016s" podCreationTimestamp="2025-10-13 13:24:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:24:39.158811103 +0000 UTC m=+1033.726195173" watchObservedRunningTime="2025-10-13 13:24:39.161084016 +0000 UTC m=+1033.728468086" Oct 13 13:24:40 crc kubenswrapper[4684]: I1013 13:24:40.168640 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41330135-d29c-494e-93ec-d67c66cbaf1c","Type":"ContainerStarted","Data":"004de2588ba416858dfd90722f42490428cbbd9b106ac553f0bbd2ec065498e1"} Oct 13 13:24:40 crc kubenswrapper[4684]: I1013 13:24:40.169422 4684 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/ceilometer-0" event={"ID":"41330135-d29c-494e-93ec-d67c66cbaf1c","Type":"ContainerStarted","Data":"05e3d77ee3e951ff62ce9eba2b99a8c659bcd8d42154f137a231025c875e491c"} Oct 13 13:24:40 crc kubenswrapper[4684]: I1013 13:24:40.170771 4684 generic.go:334] "Generic (PLEG): container finished" podID="b3904d99-f791-4d1d-b518-c5e367fa4d39" containerID="86d228b77a0ec0e333bcf0f082841ca4b49ea3c25006705656853854e4a167d0" exitCode=0 Oct 13 13:24:40 crc kubenswrapper[4684]: I1013 13:24:40.170858 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-0354-account-create-995tj" event={"ID":"b3904d99-f791-4d1d-b518-c5e367fa4d39","Type":"ContainerDied","Data":"86d228b77a0ec0e333bcf0f082841ca4b49ea3c25006705656853854e4a167d0"} Oct 13 13:24:40 crc kubenswrapper[4684]: I1013 13:24:40.701717 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-dd65-account-create-c2w2g" Oct 13 13:24:40 crc kubenswrapper[4684]: I1013 13:24:40.717250 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-608c-account-create-p4w5f" Oct 13 13:24:40 crc kubenswrapper[4684]: I1013 13:24:40.791373 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l572w\" (UniqueName: \"kubernetes.io/projected/6b2d5063-943d-4cea-b1cd-fc841e23d897-kube-api-access-l572w\") pod \"6b2d5063-943d-4cea-b1cd-fc841e23d897\" (UID: \"6b2d5063-943d-4cea-b1cd-fc841e23d897\") " Oct 13 13:24:40 crc kubenswrapper[4684]: I1013 13:24:40.798118 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b2d5063-943d-4cea-b1cd-fc841e23d897-kube-api-access-l572w" (OuterVolumeSpecName: "kube-api-access-l572w") pod "6b2d5063-943d-4cea-b1cd-fc841e23d897" (UID: "6b2d5063-943d-4cea-b1cd-fc841e23d897"). InnerVolumeSpecName "kube-api-access-l572w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:24:40 crc kubenswrapper[4684]: I1013 13:24:40.893622 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77662\" (UniqueName: \"kubernetes.io/projected/75fd1cdd-9db0-47d5-be3b-874bea0755b8-kube-api-access-77662\") pod \"75fd1cdd-9db0-47d5-be3b-874bea0755b8\" (UID: \"75fd1cdd-9db0-47d5-be3b-874bea0755b8\") " Oct 13 13:24:40 crc kubenswrapper[4684]: I1013 13:24:40.894210 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l572w\" (UniqueName: \"kubernetes.io/projected/6b2d5063-943d-4cea-b1cd-fc841e23d897-kube-api-access-l572w\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:40 crc kubenswrapper[4684]: I1013 13:24:40.896466 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75fd1cdd-9db0-47d5-be3b-874bea0755b8-kube-api-access-77662" (OuterVolumeSpecName: "kube-api-access-77662") pod "75fd1cdd-9db0-47d5-be3b-874bea0755b8" (UID: "75fd1cdd-9db0-47d5-be3b-874bea0755b8"). InnerVolumeSpecName "kube-api-access-77662". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:24:40 crc kubenswrapper[4684]: I1013 13:24:40.996105 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77662\" (UniqueName: \"kubernetes.io/projected/75fd1cdd-9db0-47d5-be3b-874bea0755b8-kube-api-access-77662\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:41 crc kubenswrapper[4684]: I1013 13:24:41.182735 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41330135-d29c-494e-93ec-d67c66cbaf1c","Type":"ContainerStarted","Data":"7806b4a4543252c50d862ec87caf2a8c2e7a98cc3f5721b8a193c89e729df7b6"} Oct 13 13:24:41 crc kubenswrapper[4684]: I1013 13:24:41.184412 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-dd65-account-create-c2w2g" event={"ID":"6b2d5063-943d-4cea-b1cd-fc841e23d897","Type":"ContainerDied","Data":"4f19c10e2bcad31fdddbe4e7967d0b9766ee6f58909e0761ca19364fa2cae8fa"} Oct 13 13:24:41 crc kubenswrapper[4684]: I1013 13:24:41.184452 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f19c10e2bcad31fdddbe4e7967d0b9766ee6f58909e0761ca19364fa2cae8fa" Oct 13 13:24:41 crc kubenswrapper[4684]: I1013 13:24:41.184466 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-dd65-account-create-c2w2g" Oct 13 13:24:41 crc kubenswrapper[4684]: I1013 13:24:41.186404 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-608c-account-create-p4w5f" event={"ID":"75fd1cdd-9db0-47d5-be3b-874bea0755b8","Type":"ContainerDied","Data":"5fa440101fd860ec518c690af0998c8e442d7536ed27598d11c3762d8c7a8b09"} Oct 13 13:24:41 crc kubenswrapper[4684]: I1013 13:24:41.186451 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-608c-account-create-p4w5f" Oct 13 13:24:41 crc kubenswrapper[4684]: I1013 13:24:41.186464 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5fa440101fd860ec518c690af0998c8e442d7536ed27598d11c3762d8c7a8b09" Oct 13 13:24:41 crc kubenswrapper[4684]: I1013 13:24:41.480259 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-0354-account-create-995tj" Oct 13 13:24:41 crc kubenswrapper[4684]: I1013 13:24:41.630319 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pftmw\" (UniqueName: \"kubernetes.io/projected/b3904d99-f791-4d1d-b518-c5e367fa4d39-kube-api-access-pftmw\") pod \"b3904d99-f791-4d1d-b518-c5e367fa4d39\" (UID: \"b3904d99-f791-4d1d-b518-c5e367fa4d39\") " Oct 13 13:24:41 crc kubenswrapper[4684]: I1013 13:24:41.644242 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3904d99-f791-4d1d-b518-c5e367fa4d39-kube-api-access-pftmw" (OuterVolumeSpecName: "kube-api-access-pftmw") pod "b3904d99-f791-4d1d-b518-c5e367fa4d39" (UID: "b3904d99-f791-4d1d-b518-c5e367fa4d39"). InnerVolumeSpecName "kube-api-access-pftmw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:24:41 crc kubenswrapper[4684]: I1013 13:24:41.735276 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pftmw\" (UniqueName: \"kubernetes.io/projected/b3904d99-f791-4d1d-b518-c5e367fa4d39-kube-api-access-pftmw\") on node \"crc\" DevicePath \"\"" Oct 13 13:24:42 crc kubenswrapper[4684]: I1013 13:24:42.198031 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-0354-account-create-995tj" event={"ID":"b3904d99-f791-4d1d-b518-c5e367fa4d39","Type":"ContainerDied","Data":"2fc011352efab8210e0ec552d7d8759611ef8f78962943ae852805502eceab26"} Oct 13 13:24:42 crc kubenswrapper[4684]: I1013 13:24:42.198094 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2fc011352efab8210e0ec552d7d8759611ef8f78962943ae852805502eceab26" Oct 13 13:24:42 crc kubenswrapper[4684]: I1013 13:24:42.199032 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-0354-account-create-995tj" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.158090 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xq69h"] Oct 13 13:24:43 crc kubenswrapper[4684]: E1013 13:24:43.158835 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75fd1cdd-9db0-47d5-be3b-874bea0755b8" containerName="mariadb-account-create" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.158859 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="75fd1cdd-9db0-47d5-be3b-874bea0755b8" containerName="mariadb-account-create" Oct 13 13:24:43 crc kubenswrapper[4684]: E1013 13:24:43.158912 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3904d99-f791-4d1d-b518-c5e367fa4d39" containerName="mariadb-account-create" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.158923 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3904d99-f791-4d1d-b518-c5e367fa4d39" containerName="mariadb-account-create" Oct 13 13:24:43 crc kubenswrapper[4684]: E1013 13:24:43.158939 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b2d5063-943d-4cea-b1cd-fc841e23d897" containerName="mariadb-account-create" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.158947 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b2d5063-943d-4cea-b1cd-fc841e23d897" containerName="mariadb-account-create" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.159148 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b2d5063-943d-4cea-b1cd-fc841e23d897" containerName="mariadb-account-create" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.159180 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="75fd1cdd-9db0-47d5-be3b-874bea0755b8" containerName="mariadb-account-create" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.159191 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3904d99-f791-4d1d-b518-c5e367fa4d39" containerName="mariadb-account-create" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.159914 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-xq69h" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.162142 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.163003 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-s8shq" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.163887 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.167820 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xq69h"] Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.215180 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41330135-d29c-494e-93ec-d67c66cbaf1c","Type":"ContainerStarted","Data":"eab62cafcdade74864d9b4a8aea353980161e1a7c1e5b8e57374012d3938d751"} Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.216083 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.237418 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.360157455 podStartE2EDuration="5.237401598s" podCreationTimestamp="2025-10-13 13:24:38 +0000 UTC" firstStartedPulling="2025-10-13 13:24:39.075396772 +0000 UTC m=+1033.642780842" lastFinishedPulling="2025-10-13 13:24:41.952640905 +0000 UTC m=+1036.520024985" observedRunningTime="2025-10-13 13:24:43.230433366 +0000 UTC m=+1037.797817436" watchObservedRunningTime="2025-10-13 13:24:43.237401598 +0000 UTC m=+1037.804785668" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.261797 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-xq69h\" (UID: \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\") " pod="openstack/nova-cell0-conductor-db-sync-xq69h" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.261861 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pm4bv\" (UniqueName: \"kubernetes.io/projected/a57c6496-4904-4c3d-b12a-7f06e4f305d3-kube-api-access-pm4bv\") pod \"nova-cell0-conductor-db-sync-xq69h\" (UID: \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\") " pod="openstack/nova-cell0-conductor-db-sync-xq69h" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.262159 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-scripts\") pod \"nova-cell0-conductor-db-sync-xq69h\" (UID: \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\") " pod="openstack/nova-cell0-conductor-db-sync-xq69h" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.262452 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-config-data\") pod \"nova-cell0-conductor-db-sync-xq69h\" (UID: \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\") " pod="openstack/nova-cell0-conductor-db-sync-xq69h" Oct 13 13:24:43 crc kubenswrapper[4684]: 
I1013 13:24:43.364213 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-scripts\") pod \"nova-cell0-conductor-db-sync-xq69h\" (UID: \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\") " pod="openstack/nova-cell0-conductor-db-sync-xq69h" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.364601 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-config-data\") pod \"nova-cell0-conductor-db-sync-xq69h\" (UID: \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\") " pod="openstack/nova-cell0-conductor-db-sync-xq69h" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.364669 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-xq69h\" (UID: \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\") " pod="openstack/nova-cell0-conductor-db-sync-xq69h" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.364700 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pm4bv\" (UniqueName: \"kubernetes.io/projected/a57c6496-4904-4c3d-b12a-7f06e4f305d3-kube-api-access-pm4bv\") pod \"nova-cell0-conductor-db-sync-xq69h\" (UID: \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\") " pod="openstack/nova-cell0-conductor-db-sync-xq69h" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.370600 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-scripts\") pod \"nova-cell0-conductor-db-sync-xq69h\" (UID: \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\") " pod="openstack/nova-cell0-conductor-db-sync-xq69h" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.372583 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-config-data\") pod \"nova-cell0-conductor-db-sync-xq69h\" (UID: \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\") " pod="openstack/nova-cell0-conductor-db-sync-xq69h" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.372595 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-xq69h\" (UID: \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\") " pod="openstack/nova-cell0-conductor-db-sync-xq69h" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.387312 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pm4bv\" (UniqueName: \"kubernetes.io/projected/a57c6496-4904-4c3d-b12a-7f06e4f305d3-kube-api-access-pm4bv\") pod \"nova-cell0-conductor-db-sync-xq69h\" (UID: \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\") " pod="openstack/nova-cell0-conductor-db-sync-xq69h" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.484122 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-xq69h" Oct 13 13:24:43 crc kubenswrapper[4684]: I1013 13:24:43.919319 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xq69h"] Oct 13 13:24:43 crc kubenswrapper[4684]: W1013 13:24:43.922389 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda57c6496_4904_4c3d_b12a_7f06e4f305d3.slice/crio-5334343bb5de36213b74c78f22bbae33144a47d95852eae43efecb4f9fd0d6ec WatchSource:0}: Error finding container 5334343bb5de36213b74c78f22bbae33144a47d95852eae43efecb4f9fd0d6ec: Status 404 returned error can't find the container with id 5334343bb5de36213b74c78f22bbae33144a47d95852eae43efecb4f9fd0d6ec Oct 13 13:24:44 crc kubenswrapper[4684]: I1013 13:24:44.225743 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-xq69h" event={"ID":"a57c6496-4904-4c3d-b12a-7f06e4f305d3","Type":"ContainerStarted","Data":"5334343bb5de36213b74c78f22bbae33144a47d95852eae43efecb4f9fd0d6ec"} Oct 13 13:24:53 crc kubenswrapper[4684]: I1013 13:24:53.306968 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-xq69h" event={"ID":"a57c6496-4904-4c3d-b12a-7f06e4f305d3","Type":"ContainerStarted","Data":"b4388afd3ba22307e08de0aa8ce2372a4ef76cfaf391bdca7df3313b56501a55"} Oct 13 13:24:53 crc kubenswrapper[4684]: I1013 13:24:53.328206 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-xq69h" podStartSLOduration=1.5217366810000001 podStartE2EDuration="10.328186904s" podCreationTimestamp="2025-10-13 13:24:43 +0000 UTC" firstStartedPulling="2025-10-13 13:24:43.924398033 +0000 UTC m=+1038.491782113" lastFinishedPulling="2025-10-13 13:24:52.730848246 +0000 UTC m=+1047.298232336" observedRunningTime="2025-10-13 13:24:53.322245237 +0000 UTC m=+1047.889629307" watchObservedRunningTime="2025-10-13 13:24:53.328186904 +0000 UTC m=+1047.895570964" Oct 13 13:25:04 crc kubenswrapper[4684]: I1013 13:25:04.409912 4684 generic.go:334] "Generic (PLEG): container finished" podID="a57c6496-4904-4c3d-b12a-7f06e4f305d3" containerID="b4388afd3ba22307e08de0aa8ce2372a4ef76cfaf391bdca7df3313b56501a55" exitCode=0 Oct 13 13:25:04 crc kubenswrapper[4684]: I1013 13:25:04.409994 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-xq69h" event={"ID":"a57c6496-4904-4c3d-b12a-7f06e4f305d3","Type":"ContainerDied","Data":"b4388afd3ba22307e08de0aa8ce2372a4ef76cfaf391bdca7df3313b56501a55"} Oct 13 13:25:05 crc kubenswrapper[4684]: I1013 13:25:05.807479 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-xq69h" Oct 13 13:25:05 crc kubenswrapper[4684]: I1013 13:25:05.987516 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-scripts\") pod \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\" (UID: \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\") " Oct 13 13:25:05 crc kubenswrapper[4684]: I1013 13:25:05.987895 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-combined-ca-bundle\") pod \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\" (UID: \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\") " Oct 13 13:25:05 crc kubenswrapper[4684]: I1013 13:25:05.988084 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-config-data\") pod \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\" (UID: \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\") " Oct 13 13:25:05 crc kubenswrapper[4684]: I1013 13:25:05.988298 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pm4bv\" (UniqueName: \"kubernetes.io/projected/a57c6496-4904-4c3d-b12a-7f06e4f305d3-kube-api-access-pm4bv\") pod \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\" (UID: \"a57c6496-4904-4c3d-b12a-7f06e4f305d3\") " Oct 13 13:25:05 crc kubenswrapper[4684]: I1013 13:25:05.993785 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-scripts" (OuterVolumeSpecName: "scripts") pod "a57c6496-4904-4c3d-b12a-7f06e4f305d3" (UID: "a57c6496-4904-4c3d-b12a-7f06e4f305d3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:25:05 crc kubenswrapper[4684]: I1013 13:25:05.994080 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a57c6496-4904-4c3d-b12a-7f06e4f305d3-kube-api-access-pm4bv" (OuterVolumeSpecName: "kube-api-access-pm4bv") pod "a57c6496-4904-4c3d-b12a-7f06e4f305d3" (UID: "a57c6496-4904-4c3d-b12a-7f06e4f305d3"). InnerVolumeSpecName "kube-api-access-pm4bv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.014584 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a57c6496-4904-4c3d-b12a-7f06e4f305d3" (UID: "a57c6496-4904-4c3d-b12a-7f06e4f305d3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.022235 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-config-data" (OuterVolumeSpecName: "config-data") pod "a57c6496-4904-4c3d-b12a-7f06e4f305d3" (UID: "a57c6496-4904-4c3d-b12a-7f06e4f305d3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.090344 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.090379 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.090395 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a57c6496-4904-4c3d-b12a-7f06e4f305d3-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.090407 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pm4bv\" (UniqueName: \"kubernetes.io/projected/a57c6496-4904-4c3d-b12a-7f06e4f305d3-kube-api-access-pm4bv\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.435501 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-xq69h" event={"ID":"a57c6496-4904-4c3d-b12a-7f06e4f305d3","Type":"ContainerDied","Data":"5334343bb5de36213b74c78f22bbae33144a47d95852eae43efecb4f9fd0d6ec"} Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.435773 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5334343bb5de36213b74c78f22bbae33144a47d95852eae43efecb4f9fd0d6ec" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.435596 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-xq69h" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.535756 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 13 13:25:06 crc kubenswrapper[4684]: E1013 13:25:06.536499 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a57c6496-4904-4c3d-b12a-7f06e4f305d3" containerName="nova-cell0-conductor-db-sync" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.536524 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="a57c6496-4904-4c3d-b12a-7f06e4f305d3" containerName="nova-cell0-conductor-db-sync" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.536762 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="a57c6496-4904-4c3d-b12a-7f06e4f305d3" containerName="nova-cell0-conductor-db-sync" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.537517 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.539330 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-s8shq" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.542981 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.547168 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.700175 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4a657cc-d0c5-4c0d-8171-ee4acd5788ce-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f4a657cc-d0c5-4c0d-8171-ee4acd5788ce\") " pod="openstack/nova-cell0-conductor-0" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.700621 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxv4l\" (UniqueName: \"kubernetes.io/projected/f4a657cc-d0c5-4c0d-8171-ee4acd5788ce-kube-api-access-pxv4l\") pod \"nova-cell0-conductor-0\" (UID: \"f4a657cc-d0c5-4c0d-8171-ee4acd5788ce\") " pod="openstack/nova-cell0-conductor-0" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.700845 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4a657cc-d0c5-4c0d-8171-ee4acd5788ce-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"f4a657cc-d0c5-4c0d-8171-ee4acd5788ce\") " pod="openstack/nova-cell0-conductor-0" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.802675 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxv4l\" (UniqueName: \"kubernetes.io/projected/f4a657cc-d0c5-4c0d-8171-ee4acd5788ce-kube-api-access-pxv4l\") pod \"nova-cell0-conductor-0\" (UID: \"f4a657cc-d0c5-4c0d-8171-ee4acd5788ce\") " pod="openstack/nova-cell0-conductor-0" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.802760 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4a657cc-d0c5-4c0d-8171-ee4acd5788ce-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"f4a657cc-d0c5-4c0d-8171-ee4acd5788ce\") " pod="openstack/nova-cell0-conductor-0" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.802843 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4a657cc-d0c5-4c0d-8171-ee4acd5788ce-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f4a657cc-d0c5-4c0d-8171-ee4acd5788ce\") " pod="openstack/nova-cell0-conductor-0" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.808585 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4a657cc-d0c5-4c0d-8171-ee4acd5788ce-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f4a657cc-d0c5-4c0d-8171-ee4acd5788ce\") " pod="openstack/nova-cell0-conductor-0" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.812027 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4a657cc-d0c5-4c0d-8171-ee4acd5788ce-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"f4a657cc-d0c5-4c0d-8171-ee4acd5788ce\") " pod="openstack/nova-cell0-conductor-0" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.819104 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxv4l\" (UniqueName: \"kubernetes.io/projected/f4a657cc-d0c5-4c0d-8171-ee4acd5788ce-kube-api-access-pxv4l\") pod \"nova-cell0-conductor-0\" (UID: \"f4a657cc-d0c5-4c0d-8171-ee4acd5788ce\") " pod="openstack/nova-cell0-conductor-0" Oct 13 13:25:06 crc kubenswrapper[4684]: I1013 13:25:06.857819 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 13 13:25:07 crc kubenswrapper[4684]: I1013 13:25:07.322169 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 13 13:25:07 crc kubenswrapper[4684]: I1013 13:25:07.451182 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"f4a657cc-d0c5-4c0d-8171-ee4acd5788ce","Type":"ContainerStarted","Data":"77a149799c88297bd522c076b471efa1a3174d81d4759b6c9751b13606065d8a"} Oct 13 13:25:08 crc kubenswrapper[4684]: I1013 13:25:08.463169 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"f4a657cc-d0c5-4c0d-8171-ee4acd5788ce","Type":"ContainerStarted","Data":"c48858264468cdc579006202023b383494a7164f671b0e9062eafbf5cbb4b1e3"} Oct 13 13:25:08 crc kubenswrapper[4684]: I1013 13:25:08.463727 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 13 13:25:08 crc kubenswrapper[4684]: I1013 13:25:08.488467 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.488435265 podStartE2EDuration="2.488435265s" podCreationTimestamp="2025-10-13 13:25:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:25:08.477222833 +0000 UTC m=+1063.044606903" watchObservedRunningTime="2025-10-13 13:25:08.488435265 +0000 UTC m=+1063.055819335" Oct 13 13:25:08 crc kubenswrapper[4684]: I1013 13:25:08.527183 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 13 13:25:11 crc kubenswrapper[4684]: I1013 13:25:11.972427 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 13 13:25:11 crc kubenswrapper[4684]: I1013 13:25:11.973265 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="d9101813-710f-4c10-8510-415d62289ab1" containerName="kube-state-metrics" containerID="cri-o://4dd32e22b5c837aa9ecbd6667eaad24d99321d718fe1083a632580350df1026b" gracePeriod=30 Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.443762 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.498621 4684 generic.go:334] "Generic (PLEG): container finished" podID="d9101813-710f-4c10-8510-415d62289ab1" containerID="4dd32e22b5c837aa9ecbd6667eaad24d99321d718fe1083a632580350df1026b" exitCode=2 Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.498657 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.498672 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d9101813-710f-4c10-8510-415d62289ab1","Type":"ContainerDied","Data":"4dd32e22b5c837aa9ecbd6667eaad24d99321d718fe1083a632580350df1026b"} Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.498703 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d9101813-710f-4c10-8510-415d62289ab1","Type":"ContainerDied","Data":"2be8e76941bd920cc17ffd973700fc9934bd7bf067fe791c1bd2c3bda4afef22"} Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.498736 4684 scope.go:117] "RemoveContainer" containerID="4dd32e22b5c837aa9ecbd6667eaad24d99321d718fe1083a632580350df1026b" Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.520115 4684 scope.go:117] "RemoveContainer" containerID="4dd32e22b5c837aa9ecbd6667eaad24d99321d718fe1083a632580350df1026b" Oct 13 13:25:12 crc kubenswrapper[4684]: E1013 13:25:12.520557 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4dd32e22b5c837aa9ecbd6667eaad24d99321d718fe1083a632580350df1026b\": container with ID starting with 4dd32e22b5c837aa9ecbd6667eaad24d99321d718fe1083a632580350df1026b not found: ID does not exist" containerID="4dd32e22b5c837aa9ecbd6667eaad24d99321d718fe1083a632580350df1026b" Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.520601 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dd32e22b5c837aa9ecbd6667eaad24d99321d718fe1083a632580350df1026b"} err="failed to get container status \"4dd32e22b5c837aa9ecbd6667eaad24d99321d718fe1083a632580350df1026b\": rpc error: code = NotFound desc = could not find container \"4dd32e22b5c837aa9ecbd6667eaad24d99321d718fe1083a632580350df1026b\": container with ID starting with 4dd32e22b5c837aa9ecbd6667eaad24d99321d718fe1083a632580350df1026b not found: ID does not exist" Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.520938 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hgzp\" (UniqueName: \"kubernetes.io/projected/d9101813-710f-4c10-8510-415d62289ab1-kube-api-access-5hgzp\") pod \"d9101813-710f-4c10-8510-415d62289ab1\" (UID: \"d9101813-710f-4c10-8510-415d62289ab1\") " Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.529467 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9101813-710f-4c10-8510-415d62289ab1-kube-api-access-5hgzp" (OuterVolumeSpecName: "kube-api-access-5hgzp") pod "d9101813-710f-4c10-8510-415d62289ab1" (UID: "d9101813-710f-4c10-8510-415d62289ab1"). InnerVolumeSpecName "kube-api-access-5hgzp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.622323 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hgzp\" (UniqueName: \"kubernetes.io/projected/d9101813-710f-4c10-8510-415d62289ab1-kube-api-access-5hgzp\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.844927 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.852604 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.868702 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 13 13:25:12 crc kubenswrapper[4684]: E1013 13:25:12.869249 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9101813-710f-4c10-8510-415d62289ab1" containerName="kube-state-metrics" Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.869268 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9101813-710f-4c10-8510-415d62289ab1" containerName="kube-state-metrics" Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.869499 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9101813-710f-4c10-8510-415d62289ab1" containerName="kube-state-metrics" Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.870253 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.875537 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.876444 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.883943 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.928086 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh722\" (UniqueName: \"kubernetes.io/projected/09ad2e1c-80bb-4900-8c6b-346959ee7994-kube-api-access-hh722\") pod \"kube-state-metrics-0\" (UID: \"09ad2e1c-80bb-4900-8c6b-346959ee7994\") " pod="openstack/kube-state-metrics-0" Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.928496 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09ad2e1c-80bb-4900-8c6b-346959ee7994-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"09ad2e1c-80bb-4900-8c6b-346959ee7994\") " pod="openstack/kube-state-metrics-0" Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.928529 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/09ad2e1c-80bb-4900-8c6b-346959ee7994-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"09ad2e1c-80bb-4900-8c6b-346959ee7994\") " pod="openstack/kube-state-metrics-0" Oct 13 13:25:12 crc kubenswrapper[4684]: I1013 13:25:12.928559 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/09ad2e1c-80bb-4900-8c6b-346959ee7994-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"09ad2e1c-80bb-4900-8c6b-346959ee7994\") " pod="openstack/kube-state-metrics-0" Oct 13 13:25:13 crc kubenswrapper[4684]: I1013 13:25:13.030535 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh722\" (UniqueName: \"kubernetes.io/projected/09ad2e1c-80bb-4900-8c6b-346959ee7994-kube-api-access-hh722\") pod \"kube-state-metrics-0\" (UID: \"09ad2e1c-80bb-4900-8c6b-346959ee7994\") " pod="openstack/kube-state-metrics-0" Oct 13 13:25:13 crc kubenswrapper[4684]: I1013 13:25:13.030589 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09ad2e1c-80bb-4900-8c6b-346959ee7994-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"09ad2e1c-80bb-4900-8c6b-346959ee7994\") " pod="openstack/kube-state-metrics-0" Oct 13 13:25:13 crc kubenswrapper[4684]: I1013 13:25:13.030608 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/09ad2e1c-80bb-4900-8c6b-346959ee7994-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"09ad2e1c-80bb-4900-8c6b-346959ee7994\") " pod="openstack/kube-state-metrics-0" Oct 13 13:25:13 crc kubenswrapper[4684]: I1013 13:25:13.030626 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/09ad2e1c-80bb-4900-8c6b-346959ee7994-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"09ad2e1c-80bb-4900-8c6b-346959ee7994\") " pod="openstack/kube-state-metrics-0" Oct 13 13:25:13 crc kubenswrapper[4684]: I1013 13:25:13.035334 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/09ad2e1c-80bb-4900-8c6b-346959ee7994-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"09ad2e1c-80bb-4900-8c6b-346959ee7994\") " pod="openstack/kube-state-metrics-0" Oct 13 13:25:13 crc kubenswrapper[4684]: I1013 13:25:13.035772 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09ad2e1c-80bb-4900-8c6b-346959ee7994-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"09ad2e1c-80bb-4900-8c6b-346959ee7994\") " pod="openstack/kube-state-metrics-0" Oct 13 13:25:13 crc kubenswrapper[4684]: I1013 13:25:13.045684 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/09ad2e1c-80bb-4900-8c6b-346959ee7994-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"09ad2e1c-80bb-4900-8c6b-346959ee7994\") " pod="openstack/kube-state-metrics-0" Oct 13 13:25:13 crc kubenswrapper[4684]: I1013 13:25:13.046173 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh722\" (UniqueName: \"kubernetes.io/projected/09ad2e1c-80bb-4900-8c6b-346959ee7994-kube-api-access-hh722\") pod \"kube-state-metrics-0\" (UID: \"09ad2e1c-80bb-4900-8c6b-346959ee7994\") " pod="openstack/kube-state-metrics-0" Oct 13 13:25:13 crc kubenswrapper[4684]: I1013 13:25:13.194338 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 13 13:25:13 crc kubenswrapper[4684]: I1013 13:25:13.634656 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 13 13:25:13 crc kubenswrapper[4684]: I1013 13:25:13.908346 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:25:13 crc kubenswrapper[4684]: I1013 13:25:13.909253 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerName="ceilometer-central-agent" containerID="cri-o://05e3d77ee3e951ff62ce9eba2b99a8c659bcd8d42154f137a231025c875e491c" gracePeriod=30 Oct 13 13:25:13 crc kubenswrapper[4684]: I1013 13:25:13.909392 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerName="ceilometer-notification-agent" containerID="cri-o://004de2588ba416858dfd90722f42490428cbbd9b106ac553f0bbd2ec065498e1" gracePeriod=30 Oct 13 13:25:13 crc kubenswrapper[4684]: I1013 13:25:13.911851 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerName="sg-core" containerID="cri-o://7806b4a4543252c50d862ec87caf2a8c2e7a98cc3f5721b8a193c89e729df7b6" gracePeriod=30 Oct 13 13:25:13 crc kubenswrapper[4684]: I1013 13:25:13.912124 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerName="proxy-httpd" containerID="cri-o://eab62cafcdade74864d9b4a8aea353980161e1a7c1e5b8e57374012d3938d751" gracePeriod=30 Oct 13 13:25:14 crc kubenswrapper[4684]: I1013 13:25:14.361583 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9101813-710f-4c10-8510-415d62289ab1" path="/var/lib/kubelet/pods/d9101813-710f-4c10-8510-415d62289ab1/volumes" Oct 13 13:25:14 crc kubenswrapper[4684]: I1013 13:25:14.522262 4684 generic.go:334] "Generic (PLEG): container finished" podID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerID="eab62cafcdade74864d9b4a8aea353980161e1a7c1e5b8e57374012d3938d751" exitCode=0 Oct 13 13:25:14 crc kubenswrapper[4684]: I1013 13:25:14.522545 4684 generic.go:334] "Generic (PLEG): container finished" podID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerID="7806b4a4543252c50d862ec87caf2a8c2e7a98cc3f5721b8a193c89e729df7b6" exitCode=2 Oct 13 13:25:14 crc kubenswrapper[4684]: I1013 13:25:14.522555 4684 generic.go:334] "Generic (PLEG): container finished" podID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerID="05e3d77ee3e951ff62ce9eba2b99a8c659bcd8d42154f137a231025c875e491c" exitCode=0 Oct 13 13:25:14 crc kubenswrapper[4684]: I1013 13:25:14.522490 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41330135-d29c-494e-93ec-d67c66cbaf1c","Type":"ContainerDied","Data":"eab62cafcdade74864d9b4a8aea353980161e1a7c1e5b8e57374012d3938d751"} Oct 13 13:25:14 crc kubenswrapper[4684]: I1013 13:25:14.522613 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41330135-d29c-494e-93ec-d67c66cbaf1c","Type":"ContainerDied","Data":"7806b4a4543252c50d862ec87caf2a8c2e7a98cc3f5721b8a193c89e729df7b6"} Oct 13 13:25:14 crc kubenswrapper[4684]: I1013 13:25:14.522632 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"41330135-d29c-494e-93ec-d67c66cbaf1c","Type":"ContainerDied","Data":"05e3d77ee3e951ff62ce9eba2b99a8c659bcd8d42154f137a231025c875e491c"} Oct 13 13:25:14 crc kubenswrapper[4684]: I1013 13:25:14.525127 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"09ad2e1c-80bb-4900-8c6b-346959ee7994","Type":"ContainerStarted","Data":"f00f9979dc3e8f9d032afb00dcf641c837722e4d643d51a59a9a5d14ff096fd8"} Oct 13 13:25:14 crc kubenswrapper[4684]: I1013 13:25:14.525156 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"09ad2e1c-80bb-4900-8c6b-346959ee7994","Type":"ContainerStarted","Data":"722274e7b11cab22a4cdc6acadd25d484e2cf7eb82d873c1f4e20835e39ddf2b"} Oct 13 13:25:14 crc kubenswrapper[4684]: I1013 13:25:14.526161 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 13 13:25:14 crc kubenswrapper[4684]: I1013 13:25:14.545952 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.9631662250000002 podStartE2EDuration="2.545935215s" podCreationTimestamp="2025-10-13 13:25:12 +0000 UTC" firstStartedPulling="2025-10-13 13:25:13.638401042 +0000 UTC m=+1068.205785112" lastFinishedPulling="2025-10-13 13:25:14.221170032 +0000 UTC m=+1068.788554102" observedRunningTime="2025-10-13 13:25:14.544190161 +0000 UTC m=+1069.111574221" watchObservedRunningTime="2025-10-13 13:25:14.545935215 +0000 UTC m=+1069.113319275" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.289200 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.386942 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-combined-ca-bundle\") pod \"41330135-d29c-494e-93ec-d67c66cbaf1c\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.387059 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-config-data\") pod \"41330135-d29c-494e-93ec-d67c66cbaf1c\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.387098 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-scripts\") pod \"41330135-d29c-494e-93ec-d67c66cbaf1c\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.387125 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsdzr\" (UniqueName: \"kubernetes.io/projected/41330135-d29c-494e-93ec-d67c66cbaf1c-kube-api-access-zsdzr\") pod \"41330135-d29c-494e-93ec-d67c66cbaf1c\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.387178 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-sg-core-conf-yaml\") pod \"41330135-d29c-494e-93ec-d67c66cbaf1c\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 
13:25:16.387247 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41330135-d29c-494e-93ec-d67c66cbaf1c-log-httpd\") pod \"41330135-d29c-494e-93ec-d67c66cbaf1c\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.387279 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41330135-d29c-494e-93ec-d67c66cbaf1c-run-httpd\") pod \"41330135-d29c-494e-93ec-d67c66cbaf1c\" (UID: \"41330135-d29c-494e-93ec-d67c66cbaf1c\") " Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.388663 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41330135-d29c-494e-93ec-d67c66cbaf1c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "41330135-d29c-494e-93ec-d67c66cbaf1c" (UID: "41330135-d29c-494e-93ec-d67c66cbaf1c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.390161 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41330135-d29c-494e-93ec-d67c66cbaf1c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "41330135-d29c-494e-93ec-d67c66cbaf1c" (UID: "41330135-d29c-494e-93ec-d67c66cbaf1c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.394400 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41330135-d29c-494e-93ec-d67c66cbaf1c-kube-api-access-zsdzr" (OuterVolumeSpecName: "kube-api-access-zsdzr") pod "41330135-d29c-494e-93ec-d67c66cbaf1c" (UID: "41330135-d29c-494e-93ec-d67c66cbaf1c"). InnerVolumeSpecName "kube-api-access-zsdzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.395799 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-scripts" (OuterVolumeSpecName: "scripts") pod "41330135-d29c-494e-93ec-d67c66cbaf1c" (UID: "41330135-d29c-494e-93ec-d67c66cbaf1c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.424697 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "41330135-d29c-494e-93ec-d67c66cbaf1c" (UID: "41330135-d29c-494e-93ec-d67c66cbaf1c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.461709 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "41330135-d29c-494e-93ec-d67c66cbaf1c" (UID: "41330135-d29c-494e-93ec-d67c66cbaf1c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.489734 4684 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.489760 4684 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41330135-d29c-494e-93ec-d67c66cbaf1c-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.489769 4684 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41330135-d29c-494e-93ec-d67c66cbaf1c-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.489778 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.489786 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.489794 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsdzr\" (UniqueName: \"kubernetes.io/projected/41330135-d29c-494e-93ec-d67c66cbaf1c-kube-api-access-zsdzr\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.499075 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-config-data" (OuterVolumeSpecName: "config-data") pod "41330135-d29c-494e-93ec-d67c66cbaf1c" (UID: "41330135-d29c-494e-93ec-d67c66cbaf1c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.557256 4684 generic.go:334] "Generic (PLEG): container finished" podID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerID="004de2588ba416858dfd90722f42490428cbbd9b106ac553f0bbd2ec065498e1" exitCode=0 Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.557312 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41330135-d29c-494e-93ec-d67c66cbaf1c","Type":"ContainerDied","Data":"004de2588ba416858dfd90722f42490428cbbd9b106ac553f0bbd2ec065498e1"} Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.557569 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41330135-d29c-494e-93ec-d67c66cbaf1c","Type":"ContainerDied","Data":"be40bd3a518b33206287c1c3f1b4e269c1289c32a8f23f6f4c166c7287802428"} Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.557603 4684 scope.go:117] "RemoveContainer" containerID="eab62cafcdade74864d9b4a8aea353980161e1a7c1e5b8e57374012d3938d751" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.557360 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.579028 4684 scope.go:117] "RemoveContainer" containerID="7806b4a4543252c50d862ec87caf2a8c2e7a98cc3f5721b8a193c89e729df7b6" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.591306 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41330135-d29c-494e-93ec-d67c66cbaf1c-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.619176 4684 scope.go:117] "RemoveContainer" containerID="004de2588ba416858dfd90722f42490428cbbd9b106ac553f0bbd2ec065498e1" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.620443 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.638908 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.646948 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:25:16 crc kubenswrapper[4684]: E1013 13:25:16.647371 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerName="ceilometer-central-agent" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.647396 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerName="ceilometer-central-agent" Oct 13 13:25:16 crc kubenswrapper[4684]: E1013 13:25:16.647416 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerName="proxy-httpd" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.647427 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerName="proxy-httpd" Oct 13 13:25:16 crc kubenswrapper[4684]: E1013 13:25:16.647447 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerName="ceilometer-notification-agent" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.647454 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerName="ceilometer-notification-agent" Oct 13 13:25:16 crc kubenswrapper[4684]: E1013 13:25:16.647463 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerName="sg-core" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.647470 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerName="sg-core" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.647677 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerName="sg-core" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.647703 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerName="ceilometer-notification-agent" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.647721 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerName="proxy-httpd" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.647743 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" containerName="ceilometer-central-agent" Oct 13 
13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.651408 4684 scope.go:117] "RemoveContainer" containerID="05e3d77ee3e951ff62ce9eba2b99a8c659bcd8d42154f137a231025c875e491c" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.653137 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.655414 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.656639 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.656722 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.656992 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.676976 4684 scope.go:117] "RemoveContainer" containerID="eab62cafcdade74864d9b4a8aea353980161e1a7c1e5b8e57374012d3938d751" Oct 13 13:25:16 crc kubenswrapper[4684]: E1013 13:25:16.677433 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eab62cafcdade74864d9b4a8aea353980161e1a7c1e5b8e57374012d3938d751\": container with ID starting with eab62cafcdade74864d9b4a8aea353980161e1a7c1e5b8e57374012d3938d751 not found: ID does not exist" containerID="eab62cafcdade74864d9b4a8aea353980161e1a7c1e5b8e57374012d3938d751" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.679296 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eab62cafcdade74864d9b4a8aea353980161e1a7c1e5b8e57374012d3938d751"} err="failed to get container status \"eab62cafcdade74864d9b4a8aea353980161e1a7c1e5b8e57374012d3938d751\": rpc error: code = NotFound desc = could not find container \"eab62cafcdade74864d9b4a8aea353980161e1a7c1e5b8e57374012d3938d751\": container with ID starting with eab62cafcdade74864d9b4a8aea353980161e1a7c1e5b8e57374012d3938d751 not found: ID does not exist" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.679334 4684 scope.go:117] "RemoveContainer" containerID="7806b4a4543252c50d862ec87caf2a8c2e7a98cc3f5721b8a193c89e729df7b6" Oct 13 13:25:16 crc kubenswrapper[4684]: E1013 13:25:16.681825 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7806b4a4543252c50d862ec87caf2a8c2e7a98cc3f5721b8a193c89e729df7b6\": container with ID starting with 7806b4a4543252c50d862ec87caf2a8c2e7a98cc3f5721b8a193c89e729df7b6 not found: ID does not exist" containerID="7806b4a4543252c50d862ec87caf2a8c2e7a98cc3f5721b8a193c89e729df7b6" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.681877 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7806b4a4543252c50d862ec87caf2a8c2e7a98cc3f5721b8a193c89e729df7b6"} err="failed to get container status \"7806b4a4543252c50d862ec87caf2a8c2e7a98cc3f5721b8a193c89e729df7b6\": rpc error: code = NotFound desc = could not find container \"7806b4a4543252c50d862ec87caf2a8c2e7a98cc3f5721b8a193c89e729df7b6\": container with ID starting with 7806b4a4543252c50d862ec87caf2a8c2e7a98cc3f5721b8a193c89e729df7b6 not found: ID does not exist" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.681936 4684 scope.go:117] 
"RemoveContainer" containerID="004de2588ba416858dfd90722f42490428cbbd9b106ac553f0bbd2ec065498e1" Oct 13 13:25:16 crc kubenswrapper[4684]: E1013 13:25:16.682211 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"004de2588ba416858dfd90722f42490428cbbd9b106ac553f0bbd2ec065498e1\": container with ID starting with 004de2588ba416858dfd90722f42490428cbbd9b106ac553f0bbd2ec065498e1 not found: ID does not exist" containerID="004de2588ba416858dfd90722f42490428cbbd9b106ac553f0bbd2ec065498e1" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.682251 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"004de2588ba416858dfd90722f42490428cbbd9b106ac553f0bbd2ec065498e1"} err="failed to get container status \"004de2588ba416858dfd90722f42490428cbbd9b106ac553f0bbd2ec065498e1\": rpc error: code = NotFound desc = could not find container \"004de2588ba416858dfd90722f42490428cbbd9b106ac553f0bbd2ec065498e1\": container with ID starting with 004de2588ba416858dfd90722f42490428cbbd9b106ac553f0bbd2ec065498e1 not found: ID does not exist" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.682270 4684 scope.go:117] "RemoveContainer" containerID="05e3d77ee3e951ff62ce9eba2b99a8c659bcd8d42154f137a231025c875e491c" Oct 13 13:25:16 crc kubenswrapper[4684]: E1013 13:25:16.682517 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05e3d77ee3e951ff62ce9eba2b99a8c659bcd8d42154f137a231025c875e491c\": container with ID starting with 05e3d77ee3e951ff62ce9eba2b99a8c659bcd8d42154f137a231025c875e491c not found: ID does not exist" containerID="05e3d77ee3e951ff62ce9eba2b99a8c659bcd8d42154f137a231025c875e491c" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.682543 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05e3d77ee3e951ff62ce9eba2b99a8c659bcd8d42154f137a231025c875e491c"} err="failed to get container status \"05e3d77ee3e951ff62ce9eba2b99a8c659bcd8d42154f137a231025c875e491c\": rpc error: code = NotFound desc = could not find container \"05e3d77ee3e951ff62ce9eba2b99a8c659bcd8d42154f137a231025c875e491c\": container with ID starting with 05e3d77ee3e951ff62ce9eba2b99a8c659bcd8d42154f137a231025c875e491c not found: ID does not exist" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.692968 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrjsc\" (UniqueName: \"kubernetes.io/projected/92e6f67a-af44-43a1-bbec-ded11be19f59-kube-api-access-zrjsc\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.693057 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.693163 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 
13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.693198 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92e6f67a-af44-43a1-bbec-ded11be19f59-run-httpd\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.693272 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-config-data\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.693311 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.693346 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92e6f67a-af44-43a1-bbec-ded11be19f59-log-httpd\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.693374 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-scripts\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.794486 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.794562 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.794584 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92e6f67a-af44-43a1-bbec-ded11be19f59-run-httpd\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.794624 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-config-data\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.794663 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: 
\"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.794705 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92e6f67a-af44-43a1-bbec-ded11be19f59-log-httpd\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.794731 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-scripts\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.794800 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrjsc\" (UniqueName: \"kubernetes.io/projected/92e6f67a-af44-43a1-bbec-ded11be19f59-kube-api-access-zrjsc\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.795211 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92e6f67a-af44-43a1-bbec-ded11be19f59-run-httpd\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.795287 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92e6f67a-af44-43a1-bbec-ded11be19f59-log-httpd\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.798198 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.798432 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-config-data\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.798710 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.798869 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-scripts\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.799175 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 
13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.818859 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrjsc\" (UniqueName: \"kubernetes.io/projected/92e6f67a-af44-43a1-bbec-ded11be19f59-kube-api-access-zrjsc\") pod \"ceilometer-0\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " pod="openstack/ceilometer-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.886719 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 13 13:25:16 crc kubenswrapper[4684]: I1013 13:25:16.978086 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.370697 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-psw2f"] Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.372208 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-psw2f" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.374838 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.375085 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.385425 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-psw2f"] Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.409331 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js5h6\" (UniqueName: \"kubernetes.io/projected/a538cd5e-c95c-430e-be87-cdc9256cb876-kube-api-access-js5h6\") pod \"nova-cell0-cell-mapping-psw2f\" (UID: \"a538cd5e-c95c-430e-be87-cdc9256cb876\") " pod="openstack/nova-cell0-cell-mapping-psw2f" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.409398 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-psw2f\" (UID: \"a538cd5e-c95c-430e-be87-cdc9256cb876\") " pod="openstack/nova-cell0-cell-mapping-psw2f" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.409616 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-scripts\") pod \"nova-cell0-cell-mapping-psw2f\" (UID: \"a538cd5e-c95c-430e-be87-cdc9256cb876\") " pod="openstack/nova-cell0-cell-mapping-psw2f" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.409953 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-config-data\") pod \"nova-cell0-cell-mapping-psw2f\" (UID: \"a538cd5e-c95c-430e-be87-cdc9256cb876\") " pod="openstack/nova-cell0-cell-mapping-psw2f" Oct 13 13:25:17 crc kubenswrapper[4684]: W1013 13:25:17.493760 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92e6f67a_af44_43a1_bbec_ded11be19f59.slice/crio-47fce64c23be447b6fb3e27de23e55929f376f21977305d435aedb77f4fac784 WatchSource:0}: Error finding container 
47fce64c23be447b6fb3e27de23e55929f376f21977305d435aedb77f4fac784: Status 404 returned error can't find the container with id 47fce64c23be447b6fb3e27de23e55929f376f21977305d435aedb77f4fac784 Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.509371 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.516253 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-config-data\") pod \"nova-cell0-cell-mapping-psw2f\" (UID: \"a538cd5e-c95c-430e-be87-cdc9256cb876\") " pod="openstack/nova-cell0-cell-mapping-psw2f" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.516322 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js5h6\" (UniqueName: \"kubernetes.io/projected/a538cd5e-c95c-430e-be87-cdc9256cb876-kube-api-access-js5h6\") pod \"nova-cell0-cell-mapping-psw2f\" (UID: \"a538cd5e-c95c-430e-be87-cdc9256cb876\") " pod="openstack/nova-cell0-cell-mapping-psw2f" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.516348 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-psw2f\" (UID: \"a538cd5e-c95c-430e-be87-cdc9256cb876\") " pod="openstack/nova-cell0-cell-mapping-psw2f" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.516407 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-scripts\") pod \"nova-cell0-cell-mapping-psw2f\" (UID: \"a538cd5e-c95c-430e-be87-cdc9256cb876\") " pod="openstack/nova-cell0-cell-mapping-psw2f" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.526931 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-scripts\") pod \"nova-cell0-cell-mapping-psw2f\" (UID: \"a538cd5e-c95c-430e-be87-cdc9256cb876\") " pod="openstack/nova-cell0-cell-mapping-psw2f" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.533206 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-config-data\") pod \"nova-cell0-cell-mapping-psw2f\" (UID: \"a538cd5e-c95c-430e-be87-cdc9256cb876\") " pod="openstack/nova-cell0-cell-mapping-psw2f" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.540787 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.543033 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.546302 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.553324 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js5h6\" (UniqueName: \"kubernetes.io/projected/a538cd5e-c95c-430e-be87-cdc9256cb876-kube-api-access-js5h6\") pod \"nova-cell0-cell-mapping-psw2f\" (UID: \"a538cd5e-c95c-430e-be87-cdc9256cb876\") " pod="openstack/nova-cell0-cell-mapping-psw2f" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.556535 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-psw2f\" (UID: \"a538cd5e-c95c-430e-be87-cdc9256cb876\") " pod="openstack/nova-cell0-cell-mapping-psw2f" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.557611 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.597560 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92e6f67a-af44-43a1-bbec-ded11be19f59","Type":"ContainerStarted","Data":"47fce64c23be447b6fb3e27de23e55929f376f21977305d435aedb77f4fac784"} Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.618268 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/939914d8-d67b-43c3-a0ee-16136fa489da-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"939914d8-d67b-43c3-a0ee-16136fa489da\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.618359 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/939914d8-d67b-43c3-a0ee-16136fa489da-config-data\") pod \"nova-scheduler-0\" (UID: \"939914d8-d67b-43c3-a0ee-16136fa489da\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.618398 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcbws\" (UniqueName: \"kubernetes.io/projected/939914d8-d67b-43c3-a0ee-16136fa489da-kube-api-access-bcbws\") pod \"nova-scheduler-0\" (UID: \"939914d8-d67b-43c3-a0ee-16136fa489da\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.640246 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.642322 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.646033 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.659076 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.689317 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.690837 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.695749 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.720562 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c7e861-a814-431e-bdc2-30f99cf62006-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"64c7e861-a814-431e-bdc2-30f99cf62006\") " pod="openstack/nova-metadata-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.720675 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tldzn\" (UniqueName: \"kubernetes.io/projected/64c7e861-a814-431e-bdc2-30f99cf62006-kube-api-access-tldzn\") pod \"nova-metadata-0\" (UID: \"64c7e861-a814-431e-bdc2-30f99cf62006\") " pod="openstack/nova-metadata-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.720735 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/411b395b-0284-4324-af21-e9838cdf1262-config-data\") pod \"nova-api-0\" (UID: \"411b395b-0284-4324-af21-e9838cdf1262\") " pod="openstack/nova-api-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.720778 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/939914d8-d67b-43c3-a0ee-16136fa489da-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"939914d8-d67b-43c3-a0ee-16136fa489da\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.720802 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7x22t\" (UniqueName: \"kubernetes.io/projected/411b395b-0284-4324-af21-e9838cdf1262-kube-api-access-7x22t\") pod \"nova-api-0\" (UID: \"411b395b-0284-4324-af21-e9838cdf1262\") " pod="openstack/nova-api-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.720824 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/411b395b-0284-4324-af21-e9838cdf1262-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"411b395b-0284-4324-af21-e9838cdf1262\") " pod="openstack/nova-api-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.720863 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64c7e861-a814-431e-bdc2-30f99cf62006-config-data\") pod \"nova-metadata-0\" (UID: \"64c7e861-a814-431e-bdc2-30f99cf62006\") " pod="openstack/nova-metadata-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.720920 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/939914d8-d67b-43c3-a0ee-16136fa489da-config-data\") pod \"nova-scheduler-0\" (UID: \"939914d8-d67b-43c3-a0ee-16136fa489da\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.720957 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcbws\" (UniqueName: \"kubernetes.io/projected/939914d8-d67b-43c3-a0ee-16136fa489da-kube-api-access-bcbws\") pod \"nova-scheduler-0\" (UID: 
\"939914d8-d67b-43c3-a0ee-16136fa489da\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.720989 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64c7e861-a814-431e-bdc2-30f99cf62006-logs\") pod \"nova-metadata-0\" (UID: \"64c7e861-a814-431e-bdc2-30f99cf62006\") " pod="openstack/nova-metadata-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.721009 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/411b395b-0284-4324-af21-e9838cdf1262-logs\") pod \"nova-api-0\" (UID: \"411b395b-0284-4324-af21-e9838cdf1262\") " pod="openstack/nova-api-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.733838 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.733917 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/939914d8-d67b-43c3-a0ee-16136fa489da-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"939914d8-d67b-43c3-a0ee-16136fa489da\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.752293 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/939914d8-d67b-43c3-a0ee-16136fa489da-config-data\") pod \"nova-scheduler-0\" (UID: \"939914d8-d67b-43c3-a0ee-16136fa489da\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.754356 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcbws\" (UniqueName: \"kubernetes.io/projected/939914d8-d67b-43c3-a0ee-16136fa489da-kube-api-access-bcbws\") pod \"nova-scheduler-0\" (UID: \"939914d8-d67b-43c3-a0ee-16136fa489da\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.757282 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-psw2f" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.797595 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68845c6b95-pcn55"] Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.799271 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.822852 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64c7e861-a814-431e-bdc2-30f99cf62006-config-data\") pod \"nova-metadata-0\" (UID: \"64c7e861-a814-431e-bdc2-30f99cf62006\") " pod="openstack/nova-metadata-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.823045 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-dns-svc\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.823099 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64c7e861-a814-431e-bdc2-30f99cf62006-logs\") pod \"nova-metadata-0\" (UID: \"64c7e861-a814-431e-bdc2-30f99cf62006\") " pod="openstack/nova-metadata-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.823125 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/411b395b-0284-4324-af21-e9838cdf1262-logs\") pod \"nova-api-0\" (UID: \"411b395b-0284-4324-af21-e9838cdf1262\") " pod="openstack/nova-api-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.823165 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c7e861-a814-431e-bdc2-30f99cf62006-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"64c7e861-a814-431e-bdc2-30f99cf62006\") " pod="openstack/nova-metadata-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.823192 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfm9h\" (UniqueName: \"kubernetes.io/projected/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-kube-api-access-vfm9h\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.823223 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-dns-swift-storage-0\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.823406 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tldzn\" (UniqueName: \"kubernetes.io/projected/64c7e861-a814-431e-bdc2-30f99cf62006-kube-api-access-tldzn\") pod \"nova-metadata-0\" (UID: \"64c7e861-a814-431e-bdc2-30f99cf62006\") " pod="openstack/nova-metadata-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.823517 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-config\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.823599 4684 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/411b395b-0284-4324-af21-e9838cdf1262-config-data\") pod \"nova-api-0\" (UID: \"411b395b-0284-4324-af21-e9838cdf1262\") " pod="openstack/nova-api-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.823635 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-ovsdbserver-sb\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.823659 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-ovsdbserver-nb\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.823711 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7x22t\" (UniqueName: \"kubernetes.io/projected/411b395b-0284-4324-af21-e9838cdf1262-kube-api-access-7x22t\") pod \"nova-api-0\" (UID: \"411b395b-0284-4324-af21-e9838cdf1262\") " pod="openstack/nova-api-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.823736 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/411b395b-0284-4324-af21-e9838cdf1262-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"411b395b-0284-4324-af21-e9838cdf1262\") " pod="openstack/nova-api-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.830788 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64c7e861-a814-431e-bdc2-30f99cf62006-logs\") pod \"nova-metadata-0\" (UID: \"64c7e861-a814-431e-bdc2-30f99cf62006\") " pod="openstack/nova-metadata-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.831146 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/411b395b-0284-4324-af21-e9838cdf1262-logs\") pod \"nova-api-0\" (UID: \"411b395b-0284-4324-af21-e9838cdf1262\") " pod="openstack/nova-api-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.842621 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64c7e861-a814-431e-bdc2-30f99cf62006-config-data\") pod \"nova-metadata-0\" (UID: \"64c7e861-a814-431e-bdc2-30f99cf62006\") " pod="openstack/nova-metadata-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.850090 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/411b395b-0284-4324-af21-e9838cdf1262-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"411b395b-0284-4324-af21-e9838cdf1262\") " pod="openstack/nova-api-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.853796 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/411b395b-0284-4324-af21-e9838cdf1262-config-data\") pod \"nova-api-0\" (UID: \"411b395b-0284-4324-af21-e9838cdf1262\") " pod="openstack/nova-api-0" Oct 13 13:25:17 crc kubenswrapper[4684]: 
I1013 13:25:17.856104 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c7e861-a814-431e-bdc2-30f99cf62006-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"64c7e861-a814-431e-bdc2-30f99cf62006\") " pod="openstack/nova-metadata-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.866415 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68845c6b95-pcn55"] Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.913124 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.933704 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-config\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.933784 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-ovsdbserver-sb\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.933809 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-ovsdbserver-nb\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.933931 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-dns-svc\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.934010 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfm9h\" (UniqueName: \"kubernetes.io/projected/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-kube-api-access-vfm9h\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.934047 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-dns-swift-storage-0\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.934995 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-config\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.935114 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-dns-swift-storage-0\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.935715 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-ovsdbserver-nb\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.935886 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-dns-svc\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.936850 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tldzn\" (UniqueName: \"kubernetes.io/projected/64c7e861-a814-431e-bdc2-30f99cf62006-kube-api-access-tldzn\") pod \"nova-metadata-0\" (UID: \"64c7e861-a814-431e-bdc2-30f99cf62006\") " pod="openstack/nova-metadata-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.937346 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-ovsdbserver-sb\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.945488 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.952479 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7x22t\" (UniqueName: \"kubernetes.io/projected/411b395b-0284-4324-af21-e9838cdf1262-kube-api-access-7x22t\") pod \"nova-api-0\" (UID: \"411b395b-0284-4324-af21-e9838cdf1262\") " pod="openstack/nova-api-0" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.967388 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 13 13:25:17 crc kubenswrapper[4684]: I1013 13:25:17.985216 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.000514 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.000767 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfm9h\" (UniqueName: \"kubernetes.io/projected/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-kube-api-access-vfm9h\") pod \"dnsmasq-dns-68845c6b95-pcn55\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.011334 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.044942 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59b6e322-6196-45af-ae09-a3028d9e2c84-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"59b6e322-6196-45af-ae09-a3028d9e2c84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.045087 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59b6e322-6196-45af-ae09-a3028d9e2c84-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"59b6e322-6196-45af-ae09-a3028d9e2c84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.045128 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grs8j\" (UniqueName: \"kubernetes.io/projected/59b6e322-6196-45af-ae09-a3028d9e2c84-kube-api-access-grs8j\") pod \"nova-cell1-novncproxy-0\" (UID: \"59b6e322-6196-45af-ae09-a3028d9e2c84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.045419 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.135766 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.147401 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59b6e322-6196-45af-ae09-a3028d9e2c84-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"59b6e322-6196-45af-ae09-a3028d9e2c84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.147452 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grs8j\" (UniqueName: \"kubernetes.io/projected/59b6e322-6196-45af-ae09-a3028d9e2c84-kube-api-access-grs8j\") pod \"nova-cell1-novncproxy-0\" (UID: \"59b6e322-6196-45af-ae09-a3028d9e2c84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.147671 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59b6e322-6196-45af-ae09-a3028d9e2c84-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"59b6e322-6196-45af-ae09-a3028d9e2c84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.153177 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59b6e322-6196-45af-ae09-a3028d9e2c84-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"59b6e322-6196-45af-ae09-a3028d9e2c84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.161667 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59b6e322-6196-45af-ae09-a3028d9e2c84-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"59b6e322-6196-45af-ae09-a3028d9e2c84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 
13:25:18.173793 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grs8j\" (UniqueName: \"kubernetes.io/projected/59b6e322-6196-45af-ae09-a3028d9e2c84-kube-api-access-grs8j\") pod \"nova-cell1-novncproxy-0\" (UID: \"59b6e322-6196-45af-ae09-a3028d9e2c84\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.282293 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.414898 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41330135-d29c-494e-93ec-d67c66cbaf1c" path="/var/lib/kubelet/pods/41330135-d29c-494e-93ec-d67c66cbaf1c/volumes" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.462518 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-psw2f"] Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.625413 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92e6f67a-af44-43a1-bbec-ded11be19f59","Type":"ContainerStarted","Data":"92593e0fbed6edfc48873e651555fe775db3c23499bbabd991158ba4240eb215"} Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.633564 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-psw2f" event={"ID":"a538cd5e-c95c-430e-be87-cdc9256cb876","Type":"ContainerStarted","Data":"5696f314066688c9a88adfde4e888e0a9b99113fc6b1bee06361511e2be0de7f"} Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.802779 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 13 13:25:18 crc kubenswrapper[4684]: W1013 13:25:18.888973 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode6a99f03_7caa_4fa4_856a_26d2ec9a5d19.slice/crio-801edcbbbb1296d857d41105fac35b6e187f5ae6b517090e7c39799b1e3f5ca4 WatchSource:0}: Error finding container 801edcbbbb1296d857d41105fac35b6e187f5ae6b517090e7c39799b1e3f5ca4: Status 404 returned error can't find the container with id 801edcbbbb1296d857d41105fac35b6e187f5ae6b517090e7c39799b1e3f5ca4 Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.892995 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68845c6b95-pcn55"] Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.923051 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.933450 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.946981 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xjt66"] Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.950048 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-xjt66" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.952535 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.956873 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.972250 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-scripts\") pod \"nova-cell1-conductor-db-sync-xjt66\" (UID: \"5097f057-d2b4-41ba-bf72-d31f0d346d06\") " pod="openstack/nova-cell1-conductor-db-sync-xjt66" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.972315 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-config-data\") pod \"nova-cell1-conductor-db-sync-xjt66\" (UID: \"5097f057-d2b4-41ba-bf72-d31f0d346d06\") " pod="openstack/nova-cell1-conductor-db-sync-xjt66" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.972338 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frz9v\" (UniqueName: \"kubernetes.io/projected/5097f057-d2b4-41ba-bf72-d31f0d346d06-kube-api-access-frz9v\") pod \"nova-cell1-conductor-db-sync-xjt66\" (UID: \"5097f057-d2b4-41ba-bf72-d31f0d346d06\") " pod="openstack/nova-cell1-conductor-db-sync-xjt66" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.972417 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-xjt66\" (UID: \"5097f057-d2b4-41ba-bf72-d31f0d346d06\") " pod="openstack/nova-cell1-conductor-db-sync-xjt66" Oct 13 13:25:18 crc kubenswrapper[4684]: I1013 13:25:18.979158 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xjt66"] Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.059689 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 13 13:25:19 crc kubenswrapper[4684]: W1013 13:25:19.059970 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59b6e322_6196_45af_ae09_a3028d9e2c84.slice/crio-d2929fd94c978232a978eed35f339303ffba5dbff073419bf0d04b6ba0e66f11 WatchSource:0}: Error finding container d2929fd94c978232a978eed35f339303ffba5dbff073419bf0d04b6ba0e66f11: Status 404 returned error can't find the container with id d2929fd94c978232a978eed35f339303ffba5dbff073419bf0d04b6ba0e66f11 Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.082829 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-scripts\") pod \"nova-cell1-conductor-db-sync-xjt66\" (UID: \"5097f057-d2b4-41ba-bf72-d31f0d346d06\") " pod="openstack/nova-cell1-conductor-db-sync-xjt66" Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.083048 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-config-data\") pod \"nova-cell1-conductor-db-sync-xjt66\" (UID: \"5097f057-d2b4-41ba-bf72-d31f0d346d06\") " pod="openstack/nova-cell1-conductor-db-sync-xjt66" Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.083105 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frz9v\" (UniqueName: \"kubernetes.io/projected/5097f057-d2b4-41ba-bf72-d31f0d346d06-kube-api-access-frz9v\") pod \"nova-cell1-conductor-db-sync-xjt66\" (UID: \"5097f057-d2b4-41ba-bf72-d31f0d346d06\") " pod="openstack/nova-cell1-conductor-db-sync-xjt66" Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.083513 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-xjt66\" (UID: \"5097f057-d2b4-41ba-bf72-d31f0d346d06\") " pod="openstack/nova-cell1-conductor-db-sync-xjt66" Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.088724 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-config-data\") pod \"nova-cell1-conductor-db-sync-xjt66\" (UID: \"5097f057-d2b4-41ba-bf72-d31f0d346d06\") " pod="openstack/nova-cell1-conductor-db-sync-xjt66" Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.089343 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-xjt66\" (UID: \"5097f057-d2b4-41ba-bf72-d31f0d346d06\") " pod="openstack/nova-cell1-conductor-db-sync-xjt66" Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.089702 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-scripts\") pod \"nova-cell1-conductor-db-sync-xjt66\" (UID: \"5097f057-d2b4-41ba-bf72-d31f0d346d06\") " pod="openstack/nova-cell1-conductor-db-sync-xjt66" Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.100535 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frz9v\" (UniqueName: \"kubernetes.io/projected/5097f057-d2b4-41ba-bf72-d31f0d346d06-kube-api-access-frz9v\") pod \"nova-cell1-conductor-db-sync-xjt66\" (UID: \"5097f057-d2b4-41ba-bf72-d31f0d346d06\") " pod="openstack/nova-cell1-conductor-db-sync-xjt66" Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.280161 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-xjt66" Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.709244 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92e6f67a-af44-43a1-bbec-ded11be19f59","Type":"ContainerStarted","Data":"e37e0c33d7df083b84dc79703516dd3186fe87c6f2bc8e15b9bcb9f5e9f43894"} Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.709585 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92e6f67a-af44-43a1-bbec-ded11be19f59","Type":"ContainerStarted","Data":"807330ea973ae4dd65a81fbc8bbd9642dcc0273d2c27907b8324c071756cd1db"} Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.737532 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-psw2f" event={"ID":"a538cd5e-c95c-430e-be87-cdc9256cb876","Type":"ContainerStarted","Data":"d8027453ae279cbf845f8b00002d138204406f067600c4a5b58f53cc193a9b65"} Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.758637 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-psw2f" podStartSLOduration=2.75862526 podStartE2EDuration="2.75862526s" podCreationTimestamp="2025-10-13 13:25:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:25:19.75637385 +0000 UTC m=+1074.323757920" watchObservedRunningTime="2025-10-13 13:25:19.75862526 +0000 UTC m=+1074.326009330" Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.767974 4684 generic.go:334] "Generic (PLEG): container finished" podID="e6a99f03-7caa-4fa4-856a-26d2ec9a5d19" containerID="a4531bc3114ce426c3115e32ae839f76e0c3911482068252df9d2ea945ddf4fb" exitCode=0 Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.768050 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68845c6b95-pcn55" event={"ID":"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19","Type":"ContainerDied","Data":"a4531bc3114ce426c3115e32ae839f76e0c3911482068252df9d2ea945ddf4fb"} Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.768072 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68845c6b95-pcn55" event={"ID":"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19","Type":"ContainerStarted","Data":"801edcbbbb1296d857d41105fac35b6e187f5ae6b517090e7c39799b1e3f5ca4"} Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.777413 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"59b6e322-6196-45af-ae09-a3028d9e2c84","Type":"ContainerStarted","Data":"d2929fd94c978232a978eed35f339303ffba5dbff073419bf0d04b6ba0e66f11"} Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.791875 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"64c7e861-a814-431e-bdc2-30f99cf62006","Type":"ContainerStarted","Data":"764873077c6b4c143b4fdc416e5a04b2d95d9675212a4246b5157112ec145d58"} Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.795895 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"939914d8-d67b-43c3-a0ee-16136fa489da","Type":"ContainerStarted","Data":"cc42d03d048ce0d1f96e472542f11cfd3de7584a15d17170d1a19b26cab90911"} Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.817623 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"411b395b-0284-4324-af21-e9838cdf1262","Type":"ContainerStarted","Data":"84260bd04d97ea433282a4203a890843ae2da5e670d34ce6ea3d9f628753027f"} Oct 13 13:25:19 crc kubenswrapper[4684]: I1013 13:25:19.984094 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xjt66"] Oct 13 13:25:20 crc kubenswrapper[4684]: I1013 13:25:20.833264 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-xjt66" event={"ID":"5097f057-d2b4-41ba-bf72-d31f0d346d06","Type":"ContainerStarted","Data":"7a7167e6efc1451b903956e96cbf042adc479fe0ba70ea396956093709320553"} Oct 13 13:25:20 crc kubenswrapper[4684]: I1013 13:25:20.833313 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-xjt66" event={"ID":"5097f057-d2b4-41ba-bf72-d31f0d346d06","Type":"ContainerStarted","Data":"5aa9ad4bf685622494a110c1f5dc912b1d3f664530462e65df83b43fe1f0889e"} Oct 13 13:25:20 crc kubenswrapper[4684]: I1013 13:25:20.837645 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68845c6b95-pcn55" event={"ID":"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19","Type":"ContainerStarted","Data":"aeccab7e06f84dacaf38b38d3219716b91cb27230d4bae65bf34d8ffa145a672"} Oct 13 13:25:20 crc kubenswrapper[4684]: I1013 13:25:20.853601 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-xjt66" podStartSLOduration=2.8535840869999998 podStartE2EDuration="2.853584087s" podCreationTimestamp="2025-10-13 13:25:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:25:20.846522595 +0000 UTC m=+1075.413906665" watchObservedRunningTime="2025-10-13 13:25:20.853584087 +0000 UTC m=+1075.420968157" Oct 13 13:25:20 crc kubenswrapper[4684]: I1013 13:25:20.868718 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-68845c6b95-pcn55" podStartSLOduration=3.868701821 podStartE2EDuration="3.868701821s" podCreationTimestamp="2025-10-13 13:25:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:25:20.862673492 +0000 UTC m=+1075.430057572" watchObservedRunningTime="2025-10-13 13:25:20.868701821 +0000 UTC m=+1075.436085891" Oct 13 13:25:21 crc kubenswrapper[4684]: I1013 13:25:21.538691 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 13:25:21 crc kubenswrapper[4684]: I1013 13:25:21.604252 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 13 13:25:21 crc kubenswrapper[4684]: I1013 13:25:21.862148 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:25:23 crc kubenswrapper[4684]: I1013 13:25:23.207747 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 13 13:25:23 crc kubenswrapper[4684]: I1013 13:25:23.880442 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"64c7e861-a814-431e-bdc2-30f99cf62006","Type":"ContainerStarted","Data":"6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5"} Oct 13 13:25:23 crc kubenswrapper[4684]: I1013 13:25:23.880610 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"64c7e861-a814-431e-bdc2-30f99cf62006","Type":"ContainerStarted","Data":"9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb"} Oct 13 13:25:23 crc kubenswrapper[4684]: I1013 13:25:23.880741 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="64c7e861-a814-431e-bdc2-30f99cf62006" containerName="nova-metadata-log" containerID="cri-o://9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb" gracePeriod=30 Oct 13 13:25:23 crc kubenswrapper[4684]: I1013 13:25:23.881166 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="64c7e861-a814-431e-bdc2-30f99cf62006" containerName="nova-metadata-metadata" containerID="cri-o://6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5" gracePeriod=30 Oct 13 13:25:23 crc kubenswrapper[4684]: I1013 13:25:23.886740 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"939914d8-d67b-43c3-a0ee-16136fa489da","Type":"ContainerStarted","Data":"4a5b0e128fc1f7ec974869f40a3dfa15de2f6f25c16ffc90e0446b00fcd62bf3"} Oct 13 13:25:23 crc kubenswrapper[4684]: I1013 13:25:23.888711 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"411b395b-0284-4324-af21-e9838cdf1262","Type":"ContainerStarted","Data":"6d5f27c226dea5d7b51ba3361247740d6dad6609ef40c6f9c791ba5be1066ffd"} Oct 13 13:25:23 crc kubenswrapper[4684]: I1013 13:25:23.888735 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"411b395b-0284-4324-af21-e9838cdf1262","Type":"ContainerStarted","Data":"a013a5daa456a0b1c063147bfddda3a4dd8a4464ec799b123a608259da060bea"} Oct 13 13:25:23 crc kubenswrapper[4684]: I1013 13:25:23.894852 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92e6f67a-af44-43a1-bbec-ded11be19f59","Type":"ContainerStarted","Data":"ab1a1753ec7c0f82becb2c47c3e7dfc5f982580c1fcb2f99ab8e6c8e0ca0d393"} Oct 13 13:25:23 crc kubenswrapper[4684]: I1013 13:25:23.895804 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 13 13:25:23 crc kubenswrapper[4684]: I1013 13:25:23.896911 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"59b6e322-6196-45af-ae09-a3028d9e2c84","Type":"ContainerStarted","Data":"cd7d1bec2d9ff579cddccf3738c4d635e346422c74a8ac48234a33fee4732edb"} Oct 13 13:25:23 crc kubenswrapper[4684]: I1013 13:25:23.897006 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="59b6e322-6196-45af-ae09-a3028d9e2c84" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://cd7d1bec2d9ff579cddccf3738c4d635e346422c74a8ac48234a33fee4732edb" gracePeriod=30 Oct 13 13:25:23 crc kubenswrapper[4684]: I1013 13:25:23.911380 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.295630906 podStartE2EDuration="6.911362098s" podCreationTimestamp="2025-10-13 13:25:17 +0000 UTC" firstStartedPulling="2025-10-13 13:25:18.929458167 +0000 UTC m=+1073.496842237" lastFinishedPulling="2025-10-13 13:25:22.545189349 +0000 UTC m=+1077.112573429" observedRunningTime="2025-10-13 13:25:23.906456094 +0000 UTC m=+1078.473840164" watchObservedRunningTime="2025-10-13 13:25:23.911362098 +0000 UTC m=+1078.478746168" Oct 13 13:25:23 crc 
kubenswrapper[4684]: I1013 13:25:23.942179 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.888253342 podStartE2EDuration="7.942163814s" podCreationTimestamp="2025-10-13 13:25:16 +0000 UTC" firstStartedPulling="2025-10-13 13:25:17.496539002 +0000 UTC m=+1072.063923072" lastFinishedPulling="2025-10-13 13:25:22.550449464 +0000 UTC m=+1077.117833544" observedRunningTime="2025-10-13 13:25:23.93469078 +0000 UTC m=+1078.502074870" watchObservedRunningTime="2025-10-13 13:25:23.942163814 +0000 UTC m=+1078.509547884" Oct 13 13:25:23 crc kubenswrapper[4684]: I1013 13:25:23.959779 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.2078527709999998 podStartE2EDuration="6.959759197s" podCreationTimestamp="2025-10-13 13:25:17 +0000 UTC" firstStartedPulling="2025-10-13 13:25:18.792201809 +0000 UTC m=+1073.359585879" lastFinishedPulling="2025-10-13 13:25:22.544108235 +0000 UTC m=+1077.111492305" observedRunningTime="2025-10-13 13:25:23.955131541 +0000 UTC m=+1078.522515611" watchObservedRunningTime="2025-10-13 13:25:23.959759197 +0000 UTC m=+1078.527143267" Oct 13 13:25:23 crc kubenswrapper[4684]: I1013 13:25:23.977707 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.5023855040000003 podStartE2EDuration="6.977692129s" podCreationTimestamp="2025-10-13 13:25:17 +0000 UTC" firstStartedPulling="2025-10-13 13:25:19.069062068 +0000 UTC m=+1073.636446138" lastFinishedPulling="2025-10-13 13:25:22.544368693 +0000 UTC m=+1077.111752763" observedRunningTime="2025-10-13 13:25:23.970563386 +0000 UTC m=+1078.537947456" watchObservedRunningTime="2025-10-13 13:25:23.977692129 +0000 UTC m=+1078.545076199" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.006759 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.380228709 podStartE2EDuration="7.006738271s" podCreationTimestamp="2025-10-13 13:25:17 +0000 UTC" firstStartedPulling="2025-10-13 13:25:18.910859912 +0000 UTC m=+1073.478243982" lastFinishedPulling="2025-10-13 13:25:22.537369474 +0000 UTC m=+1077.104753544" observedRunningTime="2025-10-13 13:25:23.990376118 +0000 UTC m=+1078.557760188" watchObservedRunningTime="2025-10-13 13:25:24.006738271 +0000 UTC m=+1078.574122341" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.487947 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.558633 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64c7e861-a814-431e-bdc2-30f99cf62006-config-data\") pod \"64c7e861-a814-431e-bdc2-30f99cf62006\" (UID: \"64c7e861-a814-431e-bdc2-30f99cf62006\") " Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.558780 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tldzn\" (UniqueName: \"kubernetes.io/projected/64c7e861-a814-431e-bdc2-30f99cf62006-kube-api-access-tldzn\") pod \"64c7e861-a814-431e-bdc2-30f99cf62006\" (UID: \"64c7e861-a814-431e-bdc2-30f99cf62006\") " Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.558856 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64c7e861-a814-431e-bdc2-30f99cf62006-logs\") pod \"64c7e861-a814-431e-bdc2-30f99cf62006\" (UID: \"64c7e861-a814-431e-bdc2-30f99cf62006\") " Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.558885 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c7e861-a814-431e-bdc2-30f99cf62006-combined-ca-bundle\") pod \"64c7e861-a814-431e-bdc2-30f99cf62006\" (UID: \"64c7e861-a814-431e-bdc2-30f99cf62006\") " Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.559373 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64c7e861-a814-431e-bdc2-30f99cf62006-logs" (OuterVolumeSpecName: "logs") pod "64c7e861-a814-431e-bdc2-30f99cf62006" (UID: "64c7e861-a814-431e-bdc2-30f99cf62006"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.580135 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64c7e861-a814-431e-bdc2-30f99cf62006-kube-api-access-tldzn" (OuterVolumeSpecName: "kube-api-access-tldzn") pod "64c7e861-a814-431e-bdc2-30f99cf62006" (UID: "64c7e861-a814-431e-bdc2-30f99cf62006"). InnerVolumeSpecName "kube-api-access-tldzn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.596186 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64c7e861-a814-431e-bdc2-30f99cf62006-config-data" (OuterVolumeSpecName: "config-data") pod "64c7e861-a814-431e-bdc2-30f99cf62006" (UID: "64c7e861-a814-431e-bdc2-30f99cf62006"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.601221 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64c7e861-a814-431e-bdc2-30f99cf62006-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64c7e861-a814-431e-bdc2-30f99cf62006" (UID: "64c7e861-a814-431e-bdc2-30f99cf62006"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.660662 4684 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64c7e861-a814-431e-bdc2-30f99cf62006-logs\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.660700 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c7e861-a814-431e-bdc2-30f99cf62006-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.660710 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64c7e861-a814-431e-bdc2-30f99cf62006-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.660718 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tldzn\" (UniqueName: \"kubernetes.io/projected/64c7e861-a814-431e-bdc2-30f99cf62006-kube-api-access-tldzn\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.917469 4684 generic.go:334] "Generic (PLEG): container finished" podID="64c7e861-a814-431e-bdc2-30f99cf62006" containerID="6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5" exitCode=0 Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.917502 4684 generic.go:334] "Generic (PLEG): container finished" podID="64c7e861-a814-431e-bdc2-30f99cf62006" containerID="9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb" exitCode=143 Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.917597 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"64c7e861-a814-431e-bdc2-30f99cf62006","Type":"ContainerDied","Data":"6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5"} Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.917641 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"64c7e861-a814-431e-bdc2-30f99cf62006","Type":"ContainerDied","Data":"9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb"} Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.917652 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"64c7e861-a814-431e-bdc2-30f99cf62006","Type":"ContainerDied","Data":"764873077c6b4c143b4fdc416e5a04b2d95d9675212a4246b5157112ec145d58"} Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.917676 4684 scope.go:117] "RemoveContainer" containerID="6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.917837 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.942075 4684 scope.go:117] "RemoveContainer" containerID="9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.957027 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.977229 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.988243 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 13 13:25:24 crc kubenswrapper[4684]: E1013 13:25:24.988687 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64c7e861-a814-431e-bdc2-30f99cf62006" containerName="nova-metadata-log" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.988705 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="64c7e861-a814-431e-bdc2-30f99cf62006" containerName="nova-metadata-log" Oct 13 13:25:24 crc kubenswrapper[4684]: E1013 13:25:24.988739 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64c7e861-a814-431e-bdc2-30f99cf62006" containerName="nova-metadata-metadata" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.988745 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="64c7e861-a814-431e-bdc2-30f99cf62006" containerName="nova-metadata-metadata" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.988943 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="64c7e861-a814-431e-bdc2-30f99cf62006" containerName="nova-metadata-log" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.988999 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="64c7e861-a814-431e-bdc2-30f99cf62006" containerName="nova-metadata-metadata" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.990137 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.994108 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.995495 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 13:25:24 crc kubenswrapper[4684]: I1013 13:25:24.997783 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.005236 4684 scope.go:117] "RemoveContainer" containerID="6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5" Oct 13 13:25:25 crc kubenswrapper[4684]: E1013 13:25:25.006433 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5\": container with ID starting with 6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5 not found: ID does not exist" containerID="6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.006482 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5"} err="failed to get container status \"6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5\": rpc error: code = NotFound desc = could not find container \"6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5\": container with ID starting with 6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5 not found: ID does not exist" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.006503 4684 scope.go:117] "RemoveContainer" containerID="9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb" Oct 13 13:25:25 crc kubenswrapper[4684]: E1013 13:25:25.007825 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb\": container with ID starting with 9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb not found: ID does not exist" containerID="9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.007848 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb"} err="failed to get container status \"9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb\": rpc error: code = NotFound desc = could not find container \"9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb\": container with ID starting with 9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb not found: ID does not exist" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.007860 4684 scope.go:117] "RemoveContainer" containerID="6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.012119 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5"} err="failed to get container status \"6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5\": rpc error: 
code = NotFound desc = could not find container \"6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5\": container with ID starting with 6ce85ba26237fbba7f77a0bdfaf5f5867978eeafa6828a88f9c5ab6d052553e5 not found: ID does not exist" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.012143 4684 scope.go:117] "RemoveContainer" containerID="9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.017177 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb"} err="failed to get container status \"9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb\": rpc error: code = NotFound desc = could not find container \"9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb\": container with ID starting with 9c507558c43801abade106a23a1158773f6bcfae5d2c4fcdbed950c0a463baeb not found: ID does not exist" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.071086 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") " pod="openstack/nova-metadata-0" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.071154 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxzct\" (UniqueName: \"kubernetes.io/projected/205685b0-da23-43dd-8fec-a66231091afa-kube-api-access-hxzct\") pod \"nova-metadata-0\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") " pod="openstack/nova-metadata-0" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.071226 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/205685b0-da23-43dd-8fec-a66231091afa-logs\") pod \"nova-metadata-0\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") " pod="openstack/nova-metadata-0" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.071263 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") " pod="openstack/nova-metadata-0" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.071306 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-config-data\") pod \"nova-metadata-0\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") " pod="openstack/nova-metadata-0" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.172817 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-config-data\") pod \"nova-metadata-0\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") " pod="openstack/nova-metadata-0" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.173069 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") " pod="openstack/nova-metadata-0" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.173124 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxzct\" (UniqueName: \"kubernetes.io/projected/205685b0-da23-43dd-8fec-a66231091afa-kube-api-access-hxzct\") pod \"nova-metadata-0\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") " pod="openstack/nova-metadata-0" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.173184 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/205685b0-da23-43dd-8fec-a66231091afa-logs\") pod \"nova-metadata-0\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") " pod="openstack/nova-metadata-0" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.173275 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") " pod="openstack/nova-metadata-0" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.174270 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/205685b0-da23-43dd-8fec-a66231091afa-logs\") pod \"nova-metadata-0\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") " pod="openstack/nova-metadata-0" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.181469 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") " pod="openstack/nova-metadata-0" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.182662 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-config-data\") pod \"nova-metadata-0\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") " pod="openstack/nova-metadata-0" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.199512 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") " pod="openstack/nova-metadata-0" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.216423 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxzct\" (UniqueName: \"kubernetes.io/projected/205685b0-da23-43dd-8fec-a66231091afa-kube-api-access-hxzct\") pod \"nova-metadata-0\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") " pod="openstack/nova-metadata-0" Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.318970 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0"
Oct 13 13:25:25 crc kubenswrapper[4684]: W1013 13:25:25.801563 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod205685b0_da23_43dd_8fec_a66231091afa.slice/crio-b0ef11030e8e83401c2123d0b73ff2927ff515ddafb3c20f09e7191f7634aa5e WatchSource:0}: Error finding container b0ef11030e8e83401c2123d0b73ff2927ff515ddafb3c20f09e7191f7634aa5e: Status 404 returned error can't find the container with id b0ef11030e8e83401c2123d0b73ff2927ff515ddafb3c20f09e7191f7634aa5e
Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.805442 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 13 13:25:25 crc kubenswrapper[4684]: I1013 13:25:25.931127 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"205685b0-da23-43dd-8fec-a66231091afa","Type":"ContainerStarted","Data":"b0ef11030e8e83401c2123d0b73ff2927ff515ddafb3c20f09e7191f7634aa5e"}
Oct 13 13:25:26 crc kubenswrapper[4684]: I1013 13:25:26.364142 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64c7e861-a814-431e-bdc2-30f99cf62006" path="/var/lib/kubelet/pods/64c7e861-a814-431e-bdc2-30f99cf62006/volumes"
Oct 13 13:25:26 crc kubenswrapper[4684]: I1013 13:25:26.940570 4684 generic.go:334] "Generic (PLEG): container finished" podID="a538cd5e-c95c-430e-be87-cdc9256cb876" containerID="d8027453ae279cbf845f8b00002d138204406f067600c4a5b58f53cc193a9b65" exitCode=0
Oct 13 13:25:26 crc kubenswrapper[4684]: I1013 13:25:26.940666 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-psw2f" event={"ID":"a538cd5e-c95c-430e-be87-cdc9256cb876","Type":"ContainerDied","Data":"d8027453ae279cbf845f8b00002d138204406f067600c4a5b58f53cc193a9b65"}
Oct 13 13:25:26 crc kubenswrapper[4684]: I1013 13:25:26.943538 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"205685b0-da23-43dd-8fec-a66231091afa","Type":"ContainerStarted","Data":"57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee"}
Oct 13 13:25:26 crc kubenswrapper[4684]: I1013 13:25:26.943580 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"205685b0-da23-43dd-8fec-a66231091afa","Type":"ContainerStarted","Data":"f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f"}
Oct 13 13:25:26 crc kubenswrapper[4684]: I1013 13:25:26.984980 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.984964635 podStartE2EDuration="2.984964635s" podCreationTimestamp="2025-10-13 13:25:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:25:26.981317502 +0000 UTC m=+1081.548701582" watchObservedRunningTime="2025-10-13 13:25:26.984964635 +0000 UTC m=+1081.552348705"
Oct 13 13:25:27 crc kubenswrapper[4684]: I1013 13:25:27.987089 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Oct 13 13:25:27 crc kubenswrapper[4684]: I1013 13:25:27.987138 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.020228 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.046385 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.046709 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.137941 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-68845c6b95-pcn55"
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.202819 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8dc864ccc-874nt"]
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.203116 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8dc864ccc-874nt" podUID="1779fa84-a997-4fa9-8c51-9fbb2949b8f7" containerName="dnsmasq-dns" containerID="cri-o://0a7eb6c19f82d32f681f7de74527cbaf99bbfbb03014333793c3d4c6cdd06918" gracePeriod=10
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.283223 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.472376 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-psw2f"
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.554998 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js5h6\" (UniqueName: \"kubernetes.io/projected/a538cd5e-c95c-430e-be87-cdc9256cb876-kube-api-access-js5h6\") pod \"a538cd5e-c95c-430e-be87-cdc9256cb876\" (UID: \"a538cd5e-c95c-430e-be87-cdc9256cb876\") "
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.555723 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-scripts\") pod \"a538cd5e-c95c-430e-be87-cdc9256cb876\" (UID: \"a538cd5e-c95c-430e-be87-cdc9256cb876\") "
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.555825 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-config-data\") pod \"a538cd5e-c95c-430e-be87-cdc9256cb876\" (UID: \"a538cd5e-c95c-430e-be87-cdc9256cb876\") "
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.555861 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-combined-ca-bundle\") pod \"a538cd5e-c95c-430e-be87-cdc9256cb876\" (UID: \"a538cd5e-c95c-430e-be87-cdc9256cb876\") "
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.563677 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-scripts" (OuterVolumeSpecName: "scripts") pod "a538cd5e-c95c-430e-be87-cdc9256cb876" (UID: "a538cd5e-c95c-430e-be87-cdc9256cb876"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.583067 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a538cd5e-c95c-430e-be87-cdc9256cb876-kube-api-access-js5h6" (OuterVolumeSpecName: "kube-api-access-js5h6") pod "a538cd5e-c95c-430e-be87-cdc9256cb876" (UID: "a538cd5e-c95c-430e-be87-cdc9256cb876"). InnerVolumeSpecName "kube-api-access-js5h6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.633652 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-config-data" (OuterVolumeSpecName: "config-data") pod "a538cd5e-c95c-430e-be87-cdc9256cb876" (UID: "a538cd5e-c95c-430e-be87-cdc9256cb876"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.674180 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-scripts\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.674217 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.674226 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js5h6\" (UniqueName: \"kubernetes.io/projected/a538cd5e-c95c-430e-be87-cdc9256cb876-kube-api-access-js5h6\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.758122 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a538cd5e-c95c-430e-be87-cdc9256cb876" (UID: "a538cd5e-c95c-430e-be87-cdc9256cb876"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.766740 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8dc864ccc-874nt"
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.776277 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a538cd5e-c95c-430e-be87-cdc9256cb876-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.877157 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-dns-swift-storage-0\") pod \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") "
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.877319 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-config\") pod \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") "
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.877677 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k87tz\" (UniqueName: \"kubernetes.io/projected/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-kube-api-access-k87tz\") pod \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") "
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.877734 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-dns-svc\") pod \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") "
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.877781 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-ovsdbserver-nb\") pod \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") "
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.877934 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-ovsdbserver-sb\") pod \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\" (UID: \"1779fa84-a997-4fa9-8c51-9fbb2949b8f7\") "
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.881751 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-kube-api-access-k87tz" (OuterVolumeSpecName: "kube-api-access-k87tz") pod "1779fa84-a997-4fa9-8c51-9fbb2949b8f7" (UID: "1779fa84-a997-4fa9-8c51-9fbb2949b8f7"). InnerVolumeSpecName "kube-api-access-k87tz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.924889 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1779fa84-a997-4fa9-8c51-9fbb2949b8f7" (UID: "1779fa84-a997-4fa9-8c51-9fbb2949b8f7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.931268 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1779fa84-a997-4fa9-8c51-9fbb2949b8f7" (UID: "1779fa84-a997-4fa9-8c51-9fbb2949b8f7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.932743 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1779fa84-a997-4fa9-8c51-9fbb2949b8f7" (UID: "1779fa84-a997-4fa9-8c51-9fbb2949b8f7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.936956 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-config" (OuterVolumeSpecName: "config") pod "1779fa84-a997-4fa9-8c51-9fbb2949b8f7" (UID: "1779fa84-a997-4fa9-8c51-9fbb2949b8f7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.943272 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1779fa84-a997-4fa9-8c51-9fbb2949b8f7" (UID: "1779fa84-a997-4fa9-8c51-9fbb2949b8f7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.959606 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-psw2f" event={"ID":"a538cd5e-c95c-430e-be87-cdc9256cb876","Type":"ContainerDied","Data":"5696f314066688c9a88adfde4e888e0a9b99113fc6b1bee06361511e2be0de7f"}
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.959642 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5696f314066688c9a88adfde4e888e0a9b99113fc6b1bee06361511e2be0de7f"
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.959688 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-psw2f"
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.971588 4684 generic.go:334] "Generic (PLEG): container finished" podID="5097f057-d2b4-41ba-bf72-d31f0d346d06" containerID="7a7167e6efc1451b903956e96cbf042adc479fe0ba70ea396956093709320553" exitCode=0
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.971663 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-xjt66" event={"ID":"5097f057-d2b4-41ba-bf72-d31f0d346d06","Type":"ContainerDied","Data":"7a7167e6efc1451b903956e96cbf042adc479fe0ba70ea396956093709320553"}
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.980681 4684 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.980714 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.980728 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.980740 4684 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.980751 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-config\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.980762 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k87tz\" (UniqueName: \"kubernetes.io/projected/1779fa84-a997-4fa9-8c51-9fbb2949b8f7-kube-api-access-k87tz\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.980791 4684 generic.go:334] "Generic (PLEG): container finished" podID="1779fa84-a997-4fa9-8c51-9fbb2949b8f7" containerID="0a7eb6c19f82d32f681f7de74527cbaf99bbfbb03014333793c3d4c6cdd06918" exitCode=0
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.980858 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8dc864ccc-874nt"
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.980888 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8dc864ccc-874nt" event={"ID":"1779fa84-a997-4fa9-8c51-9fbb2949b8f7","Type":"ContainerDied","Data":"0a7eb6c19f82d32f681f7de74527cbaf99bbfbb03014333793c3d4c6cdd06918"}
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.980929 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8dc864ccc-874nt" event={"ID":"1779fa84-a997-4fa9-8c51-9fbb2949b8f7","Type":"ContainerDied","Data":"37ac213fb8bf054f77585dfec11a8041a395e987b4ef630d64e690222319f6b8"}
Oct 13 13:25:28 crc kubenswrapper[4684]: I1013 13:25:28.980947 4684 scope.go:117] "RemoveContainer" containerID="0a7eb6c19f82d32f681f7de74527cbaf99bbfbb03014333793c3d4c6cdd06918"
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.033586 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8dc864ccc-874nt"]
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.042108 4684 scope.go:117] "RemoveContainer" containerID="64a1cec764f6685404a40485ae4f94eba15dc9a86aef37c7e748e5a217a358a1"
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.043936 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8dc864ccc-874nt"]
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.051419 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.075436 4684 scope.go:117] "RemoveContainer" containerID="0a7eb6c19f82d32f681f7de74527cbaf99bbfbb03014333793c3d4c6cdd06918"
Oct 13 13:25:29 crc kubenswrapper[4684]: E1013 13:25:29.075854 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a7eb6c19f82d32f681f7de74527cbaf99bbfbb03014333793c3d4c6cdd06918\": container with ID starting with 0a7eb6c19f82d32f681f7de74527cbaf99bbfbb03014333793c3d4c6cdd06918 not found: ID does not exist" containerID="0a7eb6c19f82d32f681f7de74527cbaf99bbfbb03014333793c3d4c6cdd06918"
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.075883 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a7eb6c19f82d32f681f7de74527cbaf99bbfbb03014333793c3d4c6cdd06918"} err="failed to get container status \"0a7eb6c19f82d32f681f7de74527cbaf99bbfbb03014333793c3d4c6cdd06918\": rpc error: code = NotFound desc = could not find container \"0a7eb6c19f82d32f681f7de74527cbaf99bbfbb03014333793c3d4c6cdd06918\": container with ID starting with 0a7eb6c19f82d32f681f7de74527cbaf99bbfbb03014333793c3d4c6cdd06918 not found: ID does not exist"
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.075919 4684 scope.go:117] "RemoveContainer" containerID="64a1cec764f6685404a40485ae4f94eba15dc9a86aef37c7e748e5a217a358a1"
Oct 13 13:25:29 crc kubenswrapper[4684]: E1013 13:25:29.076148 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64a1cec764f6685404a40485ae4f94eba15dc9a86aef37c7e748e5a217a358a1\": container with ID starting with 64a1cec764f6685404a40485ae4f94eba15dc9a86aef37c7e748e5a217a358a1 not found: ID does not exist" containerID="64a1cec764f6685404a40485ae4f94eba15dc9a86aef37c7e748e5a217a358a1"
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.076167 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64a1cec764f6685404a40485ae4f94eba15dc9a86aef37c7e748e5a217a358a1"} err="failed to get container status \"64a1cec764f6685404a40485ae4f94eba15dc9a86aef37c7e748e5a217a358a1\": rpc error: code = NotFound desc = could not find container \"64a1cec764f6685404a40485ae4f94eba15dc9a86aef37c7e748e5a217a358a1\": container with ID starting with 64a1cec764f6685404a40485ae4f94eba15dc9a86aef37c7e748e5a217a358a1 not found: ID does not exist"
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.129131 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="411b395b-0284-4324-af21-e9838cdf1262" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.129244 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="411b395b-0284-4324-af21-e9838cdf1262" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.143252 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.143501 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="411b395b-0284-4324-af21-e9838cdf1262" containerName="nova-api-log" containerID="cri-o://a013a5daa456a0b1c063147bfddda3a4dd8a4464ec799b123a608259da060bea" gracePeriod=30
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.143647 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="411b395b-0284-4324-af21-e9838cdf1262" containerName="nova-api-api" containerID="cri-o://6d5f27c226dea5d7b51ba3361247740d6dad6609ef40c6f9c791ba5be1066ffd" gracePeriod=30
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.204249 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.204734 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="205685b0-da23-43dd-8fec-a66231091afa" containerName="nova-metadata-log" containerID="cri-o://f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f" gracePeriod=30
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.204830 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="205685b0-da23-43dd-8fec-a66231091afa" containerName="nova-metadata-metadata" containerID="cri-o://57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee" gracePeriod=30
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.594312 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.771739 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.898635 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-combined-ca-bundle\") pod \"205685b0-da23-43dd-8fec-a66231091afa\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") "
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.898699 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxzct\" (UniqueName: \"kubernetes.io/projected/205685b0-da23-43dd-8fec-a66231091afa-kube-api-access-hxzct\") pod \"205685b0-da23-43dd-8fec-a66231091afa\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") "
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.898739 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-nova-metadata-tls-certs\") pod \"205685b0-da23-43dd-8fec-a66231091afa\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") "
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.898832 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/205685b0-da23-43dd-8fec-a66231091afa-logs\") pod \"205685b0-da23-43dd-8fec-a66231091afa\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") "
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.899002 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-config-data\") pod \"205685b0-da23-43dd-8fec-a66231091afa\" (UID: \"205685b0-da23-43dd-8fec-a66231091afa\") "
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.899362 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/205685b0-da23-43dd-8fec-a66231091afa-logs" (OuterVolumeSpecName: "logs") pod "205685b0-da23-43dd-8fec-a66231091afa" (UID: "205685b0-da23-43dd-8fec-a66231091afa"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.899630 4684 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/205685b0-da23-43dd-8fec-a66231091afa-logs\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.907276 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/205685b0-da23-43dd-8fec-a66231091afa-kube-api-access-hxzct" (OuterVolumeSpecName: "kube-api-access-hxzct") pod "205685b0-da23-43dd-8fec-a66231091afa" (UID: "205685b0-da23-43dd-8fec-a66231091afa"). InnerVolumeSpecName "kube-api-access-hxzct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.929707 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "205685b0-da23-43dd-8fec-a66231091afa" (UID: "205685b0-da23-43dd-8fec-a66231091afa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.930573 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-config-data" (OuterVolumeSpecName: "config-data") pod "205685b0-da23-43dd-8fec-a66231091afa" (UID: "205685b0-da23-43dd-8fec-a66231091afa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.951820 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "205685b0-da23-43dd-8fec-a66231091afa" (UID: "205685b0-da23-43dd-8fec-a66231091afa"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.995748 4684 generic.go:334] "Generic (PLEG): container finished" podID="205685b0-da23-43dd-8fec-a66231091afa" containerID="57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee" exitCode=0
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.995788 4684 generic.go:334] "Generic (PLEG): container finished" podID="205685b0-da23-43dd-8fec-a66231091afa" containerID="f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f" exitCode=143
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.995864 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"205685b0-da23-43dd-8fec-a66231091afa","Type":"ContainerDied","Data":"57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee"}
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.995898 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"205685b0-da23-43dd-8fec-a66231091afa","Type":"ContainerDied","Data":"f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f"}
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.995929 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"205685b0-da23-43dd-8fec-a66231091afa","Type":"ContainerDied","Data":"b0ef11030e8e83401c2123d0b73ff2927ff515ddafb3c20f09e7191f7634aa5e"}
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.995949 4684 scope.go:117] "RemoveContainer" containerID="57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee"
Oct 13 13:25:29 crc kubenswrapper[4684]: I1013 13:25:29.996077 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.001567 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.001596 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxzct\" (UniqueName: \"kubernetes.io/projected/205685b0-da23-43dd-8fec-a66231091afa-kube-api-access-hxzct\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.001607 4684 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.001616 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/205685b0-da23-43dd-8fec-a66231091afa-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.002289 4684 generic.go:334] "Generic (PLEG): container finished" podID="411b395b-0284-4324-af21-e9838cdf1262" containerID="a013a5daa456a0b1c063147bfddda3a4dd8a4464ec799b123a608259da060bea" exitCode=143
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.002380 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"411b395b-0284-4324-af21-e9838cdf1262","Type":"ContainerDied","Data":"a013a5daa456a0b1c063147bfddda3a4dd8a4464ec799b123a608259da060bea"}
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.039393 4684 scope.go:117] "RemoveContainer" containerID="f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.043972 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.066436 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.077214 4684 scope.go:117] "RemoveContainer" containerID="57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee"
Oct 13 13:25:30 crc kubenswrapper[4684]: E1013 13:25:30.079046 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee\": container with ID starting with 57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee not found: ID does not exist" containerID="57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.079098 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee"} err="failed to get container status \"57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee\": rpc error: code = NotFound desc = could not find container \"57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee\": container with ID starting with 57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee not found: ID does not exist"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.079128 4684 scope.go:117] "RemoveContainer" containerID="f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f"
Oct 13 13:25:30 crc kubenswrapper[4684]: E1013 13:25:30.079524 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f\": container with ID starting with f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f not found: ID does not exist" containerID="f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.079580 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f"} err="failed to get container status \"f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f\": rpc error: code = NotFound desc = could not find container \"f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f\": container with ID starting with f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f not found: ID does not exist"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.079609 4684 scope.go:117] "RemoveContainer" containerID="57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.083032 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee"} err="failed to get container status \"57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee\": rpc error: code = NotFound desc = could not find container \"57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee\": container with ID starting with 57ec1b48dae3fe5896c78bd748eae9658569656ebd42d9c7a0b3872a62900aee not found: ID does not exist"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.083120 4684 scope.go:117] "RemoveContainer" containerID="f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.089948 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f"} err="failed to get container status \"f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f\": rpc error: code = NotFound desc = could not find container \"f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f\": container with ID starting with f906d658b5f62128d2916809074631d8dc982234401f6ca5c4418ff209f2565f not found: ID does not exist"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.106937 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Oct 13 13:25:30 crc kubenswrapper[4684]: E1013 13:25:30.107398 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a538cd5e-c95c-430e-be87-cdc9256cb876" containerName="nova-manage"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.107434 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="a538cd5e-c95c-430e-be87-cdc9256cb876" containerName="nova-manage"
Oct 13 13:25:30 crc kubenswrapper[4684]: E1013 13:25:30.107453 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1779fa84-a997-4fa9-8c51-9fbb2949b8f7" containerName="init"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.107459 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="1779fa84-a997-4fa9-8c51-9fbb2949b8f7" containerName="init"
Oct 13 13:25:30 crc kubenswrapper[4684]: E1013 13:25:30.107476 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="205685b0-da23-43dd-8fec-a66231091afa" containerName="nova-metadata-log"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.107499 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="205685b0-da23-43dd-8fec-a66231091afa" containerName="nova-metadata-log"
Oct 13 13:25:30 crc kubenswrapper[4684]: E1013 13:25:30.107513 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="205685b0-da23-43dd-8fec-a66231091afa" containerName="nova-metadata-metadata"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.107519 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="205685b0-da23-43dd-8fec-a66231091afa" containerName="nova-metadata-metadata"
Oct 13 13:25:30 crc kubenswrapper[4684]: E1013 13:25:30.107528 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1779fa84-a997-4fa9-8c51-9fbb2949b8f7" containerName="dnsmasq-dns"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.107534 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="1779fa84-a997-4fa9-8c51-9fbb2949b8f7" containerName="dnsmasq-dns"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.107806 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="205685b0-da23-43dd-8fec-a66231091afa" containerName="nova-metadata-metadata"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.107831 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="205685b0-da23-43dd-8fec-a66231091afa" containerName="nova-metadata-log"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.107846 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="a538cd5e-c95c-430e-be87-cdc9256cb876" containerName="nova-manage"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.107882 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="1779fa84-a997-4fa9-8c51-9fbb2949b8f7" containerName="dnsmasq-dns"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.116565 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.120931 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.124558 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.124984 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.209636 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nfqq\" (UniqueName: \"kubernetes.io/projected/15a73c00-9963-44b4-8e6f-a8e68e929a9e-kube-api-access-2nfqq\") pod \"nova-metadata-0\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.209702 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.209751 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.209791 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15a73c00-9963-44b4-8e6f-a8e68e929a9e-logs\") pod \"nova-metadata-0\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.209827 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-config-data\") pod \"nova-metadata-0\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.312078 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15a73c00-9963-44b4-8e6f-a8e68e929a9e-logs\") pod \"nova-metadata-0\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.312148 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-config-data\") pod \"nova-metadata-0\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.312305 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nfqq\" (UniqueName: \"kubernetes.io/projected/15a73c00-9963-44b4-8e6f-a8e68e929a9e-kube-api-access-2nfqq\") pod \"nova-metadata-0\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.312336 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.312379 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.314704 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15a73c00-9963-44b4-8e6f-a8e68e929a9e-logs\") pod \"nova-metadata-0\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.320390 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.333610 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.334346 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-config-data\") pod \"nova-metadata-0\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.342211 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nfqq\" (UniqueName: \"kubernetes.io/projected/15a73c00-9963-44b4-8e6f-a8e68e929a9e-kube-api-access-2nfqq\") pod \"nova-metadata-0\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.361642 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1779fa84-a997-4fa9-8c51-9fbb2949b8f7" path="/var/lib/kubelet/pods/1779fa84-a997-4fa9-8c51-9fbb2949b8f7/volumes"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.362554 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="205685b0-da23-43dd-8fec-a66231091afa" path="/var/lib/kubelet/pods/205685b0-da23-43dd-8fec-a66231091afa/volumes"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.440882 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.457529 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-xjt66"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.516543 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frz9v\" (UniqueName: \"kubernetes.io/projected/5097f057-d2b4-41ba-bf72-d31f0d346d06-kube-api-access-frz9v\") pod \"5097f057-d2b4-41ba-bf72-d31f0d346d06\" (UID: \"5097f057-d2b4-41ba-bf72-d31f0d346d06\") "
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.516739 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-config-data\") pod \"5097f057-d2b4-41ba-bf72-d31f0d346d06\" (UID: \"5097f057-d2b4-41ba-bf72-d31f0d346d06\") "
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.516815 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-scripts\") pod \"5097f057-d2b4-41ba-bf72-d31f0d346d06\" (UID: \"5097f057-d2b4-41ba-bf72-d31f0d346d06\") "
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.516841 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-combined-ca-bundle\") pod \"5097f057-d2b4-41ba-bf72-d31f0d346d06\" (UID: \"5097f057-d2b4-41ba-bf72-d31f0d346d06\") "
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.527844 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-scripts" (OuterVolumeSpecName: "scripts") pod "5097f057-d2b4-41ba-bf72-d31f0d346d06" (UID: "5097f057-d2b4-41ba-bf72-d31f0d346d06"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.532131 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5097f057-d2b4-41ba-bf72-d31f0d346d06-kube-api-access-frz9v" (OuterVolumeSpecName: "kube-api-access-frz9v") pod "5097f057-d2b4-41ba-bf72-d31f0d346d06" (UID: "5097f057-d2b4-41ba-bf72-d31f0d346d06"). InnerVolumeSpecName "kube-api-access-frz9v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.549542 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5097f057-d2b4-41ba-bf72-d31f0d346d06" (UID: "5097f057-d2b4-41ba-bf72-d31f0d346d06"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.573991 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-config-data" (OuterVolumeSpecName: "config-data") pod "5097f057-d2b4-41ba-bf72-d31f0d346d06" (UID: "5097f057-d2b4-41ba-bf72-d31f0d346d06"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.574327 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.574364 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.619471 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.619503 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-scripts\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.619512 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5097f057-d2b4-41ba-bf72-d31f0d346d06-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.619521 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frz9v\" (UniqueName: \"kubernetes.io/projected/5097f057-d2b4-41ba-bf72-d31f0d346d06-kube-api-access-frz9v\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:30 crc kubenswrapper[4684]: I1013 13:25:30.927403 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.013630 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"15a73c00-9963-44b4-8e6f-a8e68e929a9e","Type":"ContainerStarted","Data":"dc6b1cae58139ee0f58a872ed104913635486b9c4ae7b400cdc5a002e686c47b"}
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.016963 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="939914d8-d67b-43c3-a0ee-16136fa489da" containerName="nova-scheduler-scheduler" containerID="cri-o://4a5b0e128fc1f7ec974869f40a3dfa15de2f6f25c16ffc90e0446b00fcd62bf3" gracePeriod=30
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.017376 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-xjt66"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.019346 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-xjt66" event={"ID":"5097f057-d2b4-41ba-bf72-d31f0d346d06","Type":"ContainerDied","Data":"5aa9ad4bf685622494a110c1f5dc912b1d3f664530462e65df83b43fe1f0889e"}
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.019388 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5aa9ad4bf685622494a110c1f5dc912b1d3f664530462e65df83b43fe1f0889e"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.078628 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 13 13:25:31 crc kubenswrapper[4684]: E1013 13:25:31.079086 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5097f057-d2b4-41ba-bf72-d31f0d346d06" containerName="nova-cell1-conductor-db-sync"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.079100 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="5097f057-d2b4-41ba-bf72-d31f0d346d06" containerName="nova-cell1-conductor-db-sync"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.079293 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="5097f057-d2b4-41ba-bf72-d31f0d346d06" containerName="nova-cell1-conductor-db-sync"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.079988 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.082210 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.087685 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.128200 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klh2n\" (UniqueName: \"kubernetes.io/projected/183e5916-8b09-4bef-85d2-ee83326d865d-kube-api-access-klh2n\") pod \"nova-cell1-conductor-0\" (UID: \"183e5916-8b09-4bef-85d2-ee83326d865d\") " pod="openstack/nova-cell1-conductor-0"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.128341 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/183e5916-8b09-4bef-85d2-ee83326d865d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"183e5916-8b09-4bef-85d2-ee83326d865d\") " pod="openstack/nova-cell1-conductor-0"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.128370 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/183e5916-8b09-4bef-85d2-ee83326d865d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"183e5916-8b09-4bef-85d2-ee83326d865d\") " pod="openstack/nova-cell1-conductor-0"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.229607 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/183e5916-8b09-4bef-85d2-ee83326d865d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"183e5916-8b09-4bef-85d2-ee83326d865d\") " pod="openstack/nova-cell1-conductor-0"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.229675 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/183e5916-8b09-4bef-85d2-ee83326d865d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"183e5916-8b09-4bef-85d2-ee83326d865d\") " pod="openstack/nova-cell1-conductor-0"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.229751 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klh2n\" (UniqueName: \"kubernetes.io/projected/183e5916-8b09-4bef-85d2-ee83326d865d-kube-api-access-klh2n\") pod \"nova-cell1-conductor-0\" (UID: \"183e5916-8b09-4bef-85d2-ee83326d865d\") " pod="openstack/nova-cell1-conductor-0"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.234200 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/183e5916-8b09-4bef-85d2-ee83326d865d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"183e5916-8b09-4bef-85d2-ee83326d865d\") " pod="openstack/nova-cell1-conductor-0"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.234841 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/183e5916-8b09-4bef-85d2-ee83326d865d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"183e5916-8b09-4bef-85d2-ee83326d865d\") " pod="openstack/nova-cell1-conductor-0"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.254500 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klh2n\" (UniqueName: \"kubernetes.io/projected/183e5916-8b09-4bef-85d2-ee83326d865d-kube-api-access-klh2n\") pod \"nova-cell1-conductor-0\" (UID: \"183e5916-8b09-4bef-85d2-ee83326d865d\") " pod="openstack/nova-cell1-conductor-0"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.399715 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Oct 13 13:25:31 crc kubenswrapper[4684]: I1013 13:25:31.904744 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 13 13:25:32 crc kubenswrapper[4684]: I1013 13:25:32.025507 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"183e5916-8b09-4bef-85d2-ee83326d865d","Type":"ContainerStarted","Data":"951946e762b229ccb7c17e4272410becfa8506a62dce5410ee1d43c005c1bbf7"}
Oct 13 13:25:32 crc kubenswrapper[4684]: I1013 13:25:32.027126 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"15a73c00-9963-44b4-8e6f-a8e68e929a9e","Type":"ContainerStarted","Data":"c7748928cb55c1c4405cf56297071d626eb538f48b4d3c346ad462390a66d8d4"}
Oct 13 13:25:32 crc kubenswrapper[4684]: I1013 13:25:32.027162 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"15a73c00-9963-44b4-8e6f-a8e68e929a9e","Type":"ContainerStarted","Data":"40d4b188aee7c2bdde457018353fe13126ea9bc79d9023be12cc820fa7c771c6"}
Oct 13 13:25:32 crc kubenswrapper[4684]: I1013 13:25:32.047939 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.04788421 podStartE2EDuration="2.04788421s" podCreationTimestamp="2025-10-13 13:25:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:25:32.041343965 +0000 UTC m=+1086.608728065" watchObservedRunningTime="2025-10-13 13:25:32.04788421 +0000 UTC m=+1086.615268280"
Oct 13 13:25:32 crc kubenswrapper[4684]: E1013 13:25:32.989780 4684 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4a5b0e128fc1f7ec974869f40a3dfa15de2f6f25c16ffc90e0446b00fcd62bf3" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Oct 13 13:25:32 crc kubenswrapper[4684]: E1013 13:25:32.992853 4684 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4a5b0e128fc1f7ec974869f40a3dfa15de2f6f25c16ffc90e0446b00fcd62bf3" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Oct 13 13:25:32 crc kubenswrapper[4684]: E1013 13:25:32.994416 4684 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4a5b0e128fc1f7ec974869f40a3dfa15de2f6f25c16ffc90e0446b00fcd62bf3" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Oct 13 13:25:32 crc kubenswrapper[4684]: E1013 13:25:32.994482 4684 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="939914d8-d67b-43c3-a0ee-16136fa489da" containerName="nova-scheduler-scheduler"
Oct 13 13:25:33 crc kubenswrapper[4684]: I1013 13:25:33.038771 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"183e5916-8b09-4bef-85d2-ee83326d865d","Type":"ContainerStarted","Data":"67322276334afd99870ee710de7b5577a35c120aefae3e64b6dafbbb3fac1906"}
Oct 13 13:25:33 crc kubenswrapper[4684]: I1013 13:25:33.062541 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.062522315 podStartE2EDuration="2.062522315s" podCreationTimestamp="2025-10-13 13:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:25:33.055245307 +0000 UTC m=+1087.622629377" watchObservedRunningTime="2025-10-13 13:25:33.062522315 +0000 UTC m=+1087.629906385"
Oct 13 13:25:34 crc kubenswrapper[4684]: I1013 13:25:34.047369 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.063970 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.066127 4684 generic.go:334] "Generic (PLEG): container finished" podID="939914d8-d67b-43c3-a0ee-16136fa489da" containerID="4a5b0e128fc1f7ec974869f40a3dfa15de2f6f25c16ffc90e0446b00fcd62bf3" exitCode=0
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.066182 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"939914d8-d67b-43c3-a0ee-16136fa489da","Type":"ContainerDied","Data":"4a5b0e128fc1f7ec974869f40a3dfa15de2f6f25c16ffc90e0446b00fcd62bf3"}
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.067440 4684 generic.go:334] "Generic (PLEG): container finished" podID="411b395b-0284-4324-af21-e9838cdf1262" containerID="6d5f27c226dea5d7b51ba3361247740d6dad6609ef40c6f9c791ba5be1066ffd" exitCode=0
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.068308 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.068451 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"411b395b-0284-4324-af21-e9838cdf1262","Type":"ContainerDied","Data":"6d5f27c226dea5d7b51ba3361247740d6dad6609ef40c6f9c791ba5be1066ffd"}
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.068470 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"411b395b-0284-4324-af21-e9838cdf1262","Type":"ContainerDied","Data":"84260bd04d97ea433282a4203a890843ae2da5e670d34ce6ea3d9f628753027f"}
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.068486 4684 scope.go:117] "RemoveContainer" containerID="6d5f27c226dea5d7b51ba3361247740d6dad6609ef40c6f9c791ba5be1066ffd"
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.103755 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7x22t\" (UniqueName: \"kubernetes.io/projected/411b395b-0284-4324-af21-e9838cdf1262-kube-api-access-7x22t\") pod \"411b395b-0284-4324-af21-e9838cdf1262\" (UID: \"411b395b-0284-4324-af21-e9838cdf1262\") "
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.104109 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/411b395b-0284-4324-af21-e9838cdf1262-config-data\") pod \"411b395b-0284-4324-af21-e9838cdf1262\" (UID: \"411b395b-0284-4324-af21-e9838cdf1262\") "
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.104226 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/411b395b-0284-4324-af21-e9838cdf1262-logs\") pod \"411b395b-0284-4324-af21-e9838cdf1262\" (UID: \"411b395b-0284-4324-af21-e9838cdf1262\") "
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.104253 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/411b395b-0284-4324-af21-e9838cdf1262-combined-ca-bundle\") pod \"411b395b-0284-4324-af21-e9838cdf1262\" (UID: \"411b395b-0284-4324-af21-e9838cdf1262\") "
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.105688 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/411b395b-0284-4324-af21-e9838cdf1262-logs" (OuterVolumeSpecName: "logs") pod "411b395b-0284-4324-af21-e9838cdf1262" (UID: "411b395b-0284-4324-af21-e9838cdf1262"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.133070 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/411b395b-0284-4324-af21-e9838cdf1262-kube-api-access-7x22t" (OuterVolumeSpecName: "kube-api-access-7x22t") pod "411b395b-0284-4324-af21-e9838cdf1262" (UID: "411b395b-0284-4324-af21-e9838cdf1262"). InnerVolumeSpecName "kube-api-access-7x22t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.160073 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/411b395b-0284-4324-af21-e9838cdf1262-config-data" (OuterVolumeSpecName: "config-data") pod "411b395b-0284-4324-af21-e9838cdf1262" (UID: "411b395b-0284-4324-af21-e9838cdf1262"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.169084 4684 scope.go:117] "RemoveContainer" containerID="a013a5daa456a0b1c063147bfddda3a4dd8a4464ec799b123a608259da060bea"
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.208826 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7x22t\" (UniqueName: \"kubernetes.io/projected/411b395b-0284-4324-af21-e9838cdf1262-kube-api-access-7x22t\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.208853 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/411b395b-0284-4324-af21-e9838cdf1262-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.208863 4684 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/411b395b-0284-4324-af21-e9838cdf1262-logs\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.220291 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/411b395b-0284-4324-af21-e9838cdf1262-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "411b395b-0284-4324-af21-e9838cdf1262" (UID: "411b395b-0284-4324-af21-e9838cdf1262"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.222708 4684 scope.go:117] "RemoveContainer" containerID="6d5f27c226dea5d7b51ba3361247740d6dad6609ef40c6f9c791ba5be1066ffd"
Oct 13 13:25:35 crc kubenswrapper[4684]: E1013 13:25:35.223138 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d5f27c226dea5d7b51ba3361247740d6dad6609ef40c6f9c791ba5be1066ffd\": container with ID starting with 6d5f27c226dea5d7b51ba3361247740d6dad6609ef40c6f9c791ba5be1066ffd not found: ID does not exist" containerID="6d5f27c226dea5d7b51ba3361247740d6dad6609ef40c6f9c791ba5be1066ffd"
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.223176 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d5f27c226dea5d7b51ba3361247740d6dad6609ef40c6f9c791ba5be1066ffd"} err="failed to get container status \"6d5f27c226dea5d7b51ba3361247740d6dad6609ef40c6f9c791ba5be1066ffd\": rpc error: code = NotFound desc = could not find container \"6d5f27c226dea5d7b51ba3361247740d6dad6609ef40c6f9c791ba5be1066ffd\": container with ID starting with 6d5f27c226dea5d7b51ba3361247740d6dad6609ef40c6f9c791ba5be1066ffd not found: ID does not exist"
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.223200 4684 scope.go:117] "RemoveContainer" containerID="a013a5daa456a0b1c063147bfddda3a4dd8a4464ec799b123a608259da060bea"
Oct 13 13:25:35 crc kubenswrapper[4684]: E1013 13:25:35.223413 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a013a5daa456a0b1c063147bfddda3a4dd8a4464ec799b123a608259da060bea\": container with ID starting with a013a5daa456a0b1c063147bfddda3a4dd8a4464ec799b123a608259da060bea not found: ID does not exist" containerID="a013a5daa456a0b1c063147bfddda3a4dd8a4464ec799b123a608259da060bea"
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.223430 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a013a5daa456a0b1c063147bfddda3a4dd8a4464ec799b123a608259da060bea"} err="failed to get container status \"a013a5daa456a0b1c063147bfddda3a4dd8a4464ec799b123a608259da060bea\": rpc error: code = NotFound desc = could not find container \"a013a5daa456a0b1c063147bfddda3a4dd8a4464ec799b123a608259da060bea\": container with ID starting with a013a5daa456a0b1c063147bfddda3a4dd8a4464ec799b123a608259da060bea not found: ID does not exist"
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.250008 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.317101 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/939914d8-d67b-43c3-a0ee-16136fa489da-combined-ca-bundle\") pod \"939914d8-d67b-43c3-a0ee-16136fa489da\" (UID: \"939914d8-d67b-43c3-a0ee-16136fa489da\") "
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.317208 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/939914d8-d67b-43c3-a0ee-16136fa489da-config-data\") pod \"939914d8-d67b-43c3-a0ee-16136fa489da\" (UID: \"939914d8-d67b-43c3-a0ee-16136fa489da\") "
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.317275 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcbws\" (UniqueName: \"kubernetes.io/projected/939914d8-d67b-43c3-a0ee-16136fa489da-kube-api-access-bcbws\") pod \"939914d8-d67b-43c3-a0ee-16136fa489da\" (UID: \"939914d8-d67b-43c3-a0ee-16136fa489da\") "
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.317727 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/411b395b-0284-4324-af21-e9838cdf1262-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.321943 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/939914d8-d67b-43c3-a0ee-16136fa489da-kube-api-access-bcbws" (OuterVolumeSpecName: "kube-api-access-bcbws") pod "939914d8-d67b-43c3-a0ee-16136fa489da" (UID: "939914d8-d67b-43c3-a0ee-16136fa489da"). InnerVolumeSpecName "kube-api-access-bcbws". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.353029 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/939914d8-d67b-43c3-a0ee-16136fa489da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "939914d8-d67b-43c3-a0ee-16136fa489da" (UID: "939914d8-d67b-43c3-a0ee-16136fa489da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.353082 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/939914d8-d67b-43c3-a0ee-16136fa489da-config-data" (OuterVolumeSpecName: "config-data") pod "939914d8-d67b-43c3-a0ee-16136fa489da" (UID: "939914d8-d67b-43c3-a0ee-16136fa489da"). InnerVolumeSpecName "config-data".
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.408295 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.419595 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcbws\" (UniqueName: \"kubernetes.io/projected/939914d8-d67b-43c3-a0ee-16136fa489da-kube-api-access-bcbws\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.419835 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/939914d8-d67b-43c3-a0ee-16136fa489da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.419956 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/939914d8-d67b-43c3-a0ee-16136fa489da-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.426645 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.435932 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 13 13:25:35 crc kubenswrapper[4684]: E1013 13:25:35.436381 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="939914d8-d67b-43c3-a0ee-16136fa489da" containerName="nova-scheduler-scheduler" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.436400 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="939914d8-d67b-43c3-a0ee-16136fa489da" containerName="nova-scheduler-scheduler" Oct 13 13:25:35 crc kubenswrapper[4684]: E1013 13:25:35.436414 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="411b395b-0284-4324-af21-e9838cdf1262" containerName="nova-api-api" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.436423 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="411b395b-0284-4324-af21-e9838cdf1262" containerName="nova-api-api" Oct 13 13:25:35 crc kubenswrapper[4684]: E1013 13:25:35.436431 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="411b395b-0284-4324-af21-e9838cdf1262" containerName="nova-api-log" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.436438 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="411b395b-0284-4324-af21-e9838cdf1262" containerName="nova-api-log" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.436651 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="939914d8-d67b-43c3-a0ee-16136fa489da" containerName="nova-scheduler-scheduler" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.436666 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="411b395b-0284-4324-af21-e9838cdf1262" containerName="nova-api-api" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.436690 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="411b395b-0284-4324-af21-e9838cdf1262" containerName="nova-api-log" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.437798 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.440034 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.441096 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.441178 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.445535 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.521440 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e2d01c-4826-4aa5-b1c6-686374eee6fa-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\") " pod="openstack/nova-api-0" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.521560 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14e2d01c-4826-4aa5-b1c6-686374eee6fa-config-data\") pod \"nova-api-0\" (UID: \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\") " pod="openstack/nova-api-0" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.521690 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kw5f9\" (UniqueName: \"kubernetes.io/projected/14e2d01c-4826-4aa5-b1c6-686374eee6fa-kube-api-access-kw5f9\") pod \"nova-api-0\" (UID: \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\") " pod="openstack/nova-api-0" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.521716 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14e2d01c-4826-4aa5-b1c6-686374eee6fa-logs\") pod \"nova-api-0\" (UID: \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\") " pod="openstack/nova-api-0" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.622982 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kw5f9\" (UniqueName: \"kubernetes.io/projected/14e2d01c-4826-4aa5-b1c6-686374eee6fa-kube-api-access-kw5f9\") pod \"nova-api-0\" (UID: \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\") " pod="openstack/nova-api-0" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.623036 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14e2d01c-4826-4aa5-b1c6-686374eee6fa-logs\") pod \"nova-api-0\" (UID: \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\") " pod="openstack/nova-api-0" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.623132 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e2d01c-4826-4aa5-b1c6-686374eee6fa-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\") " pod="openstack/nova-api-0" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.623209 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14e2d01c-4826-4aa5-b1c6-686374eee6fa-config-data\") pod \"nova-api-0\" (UID: \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\") " 
pod="openstack/nova-api-0" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.623573 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14e2d01c-4826-4aa5-b1c6-686374eee6fa-logs\") pod \"nova-api-0\" (UID: \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\") " pod="openstack/nova-api-0" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.626751 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e2d01c-4826-4aa5-b1c6-686374eee6fa-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\") " pod="openstack/nova-api-0" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.628520 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14e2d01c-4826-4aa5-b1c6-686374eee6fa-config-data\") pod \"nova-api-0\" (UID: \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\") " pod="openstack/nova-api-0" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.641423 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kw5f9\" (UniqueName: \"kubernetes.io/projected/14e2d01c-4826-4aa5-b1c6-686374eee6fa-kube-api-access-kw5f9\") pod \"nova-api-0\" (UID: \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\") " pod="openstack/nova-api-0" Oct 13 13:25:35 crc kubenswrapper[4684]: I1013 13:25:35.765164 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.078669 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"939914d8-d67b-43c3-a0ee-16136fa489da","Type":"ContainerDied","Data":"cc42d03d048ce0d1f96e472542f11cfd3de7584a15d17170d1a19b26cab90911"} Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.079022 4684 scope.go:117] "RemoveContainer" containerID="4a5b0e128fc1f7ec974869f40a3dfa15de2f6f25c16ffc90e0446b00fcd62bf3" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.079149 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.124420 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.134069 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.153657 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.155096 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.159235 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.170977 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.227306 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.241088 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8265b2be-813d-48d3-b1d5-9a7c93215b54-config-data\") pod \"nova-scheduler-0\" (UID: \"8265b2be-813d-48d3-b1d5-9a7c93215b54\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.241168 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8265b2be-813d-48d3-b1d5-9a7c93215b54-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8265b2be-813d-48d3-b1d5-9a7c93215b54\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.241286 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gf5g9\" (UniqueName: \"kubernetes.io/projected/8265b2be-813d-48d3-b1d5-9a7c93215b54-kube-api-access-gf5g9\") pod \"nova-scheduler-0\" (UID: \"8265b2be-813d-48d3-b1d5-9a7c93215b54\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.343341 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8265b2be-813d-48d3-b1d5-9a7c93215b54-config-data\") pod \"nova-scheduler-0\" (UID: \"8265b2be-813d-48d3-b1d5-9a7c93215b54\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.343394 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8265b2be-813d-48d3-b1d5-9a7c93215b54-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8265b2be-813d-48d3-b1d5-9a7c93215b54\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.343472 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gf5g9\" (UniqueName: \"kubernetes.io/projected/8265b2be-813d-48d3-b1d5-9a7c93215b54-kube-api-access-gf5g9\") pod \"nova-scheduler-0\" (UID: \"8265b2be-813d-48d3-b1d5-9a7c93215b54\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.349552 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8265b2be-813d-48d3-b1d5-9a7c93215b54-config-data\") pod \"nova-scheduler-0\" (UID: \"8265b2be-813d-48d3-b1d5-9a7c93215b54\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.349560 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8265b2be-813d-48d3-b1d5-9a7c93215b54-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8265b2be-813d-48d3-b1d5-9a7c93215b54\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 
13:25:36.362767 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="411b395b-0284-4324-af21-e9838cdf1262" path="/var/lib/kubelet/pods/411b395b-0284-4324-af21-e9838cdf1262/volumes" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.363393 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="939914d8-d67b-43c3-a0ee-16136fa489da" path="/var/lib/kubelet/pods/939914d8-d67b-43c3-a0ee-16136fa489da/volumes" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.364523 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gf5g9\" (UniqueName: \"kubernetes.io/projected/8265b2be-813d-48d3-b1d5-9a7c93215b54-kube-api-access-gf5g9\") pod \"nova-scheduler-0\" (UID: \"8265b2be-813d-48d3-b1d5-9a7c93215b54\") " pod="openstack/nova-scheduler-0" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.473894 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 13 13:25:36 crc kubenswrapper[4684]: I1013 13:25:36.907969 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 13:25:37 crc kubenswrapper[4684]: I1013 13:25:37.097271 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14e2d01c-4826-4aa5-b1c6-686374eee6fa","Type":"ContainerStarted","Data":"76a04656f1494eb92385266a2437d39e134f214cab20420e26f65061fab92cc5"} Oct 13 13:25:37 crc kubenswrapper[4684]: I1013 13:25:37.098937 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14e2d01c-4826-4aa5-b1c6-686374eee6fa","Type":"ContainerStarted","Data":"1767f53ce477058a2ef03b08d77c2a823576dfd5422e622a06dd9bc912593692"} Oct 13 13:25:37 crc kubenswrapper[4684]: I1013 13:25:37.099058 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14e2d01c-4826-4aa5-b1c6-686374eee6fa","Type":"ContainerStarted","Data":"8f5a5fc6fb247c346760bffec705f87d8c8cdefee4bac737ec7b0b056398d7db"} Oct 13 13:25:37 crc kubenswrapper[4684]: I1013 13:25:37.103334 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8265b2be-813d-48d3-b1d5-9a7c93215b54","Type":"ContainerStarted","Data":"0643b1a9bf6a54045213f959c3ab3458b9fe41e6f393401760ccb3c253dea516"} Oct 13 13:25:37 crc kubenswrapper[4684]: I1013 13:25:37.124457 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.124426612 podStartE2EDuration="2.124426612s" podCreationTimestamp="2025-10-13 13:25:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:25:37.116225264 +0000 UTC m=+1091.683609344" watchObservedRunningTime="2025-10-13 13:25:37.124426612 +0000 UTC m=+1091.691810682" Oct 13 13:25:38 crc kubenswrapper[4684]: I1013 13:25:38.113708 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8265b2be-813d-48d3-b1d5-9a7c93215b54","Type":"ContainerStarted","Data":"79c3ee9963c43e3fbe983c70d1510bbb0a214d280fd3d16e1683fb4e5e495d78"} Oct 13 13:25:38 crc kubenswrapper[4684]: I1013 13:25:38.126781 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.126765401 podStartE2EDuration="2.126765401s" podCreationTimestamp="2025-10-13 13:25:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:25:38.124739498 +0000 UTC m=+1092.692123558" watchObservedRunningTime="2025-10-13 13:25:38.126765401 +0000 UTC m=+1092.694149471" Oct 13 13:25:40 crc kubenswrapper[4684]: I1013 13:25:40.441305 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 13 13:25:40 crc kubenswrapper[4684]: I1013 13:25:40.441779 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 13 13:25:41 crc kubenswrapper[4684]: I1013 13:25:41.432846 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 13 13:25:41 crc kubenswrapper[4684]: I1013 13:25:41.458096 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="15a73c00-9963-44b4-8e6f-a8e68e929a9e" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 13 13:25:41 crc kubenswrapper[4684]: I1013 13:25:41.458094 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="15a73c00-9963-44b4-8e6f-a8e68e929a9e" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 13 13:25:41 crc kubenswrapper[4684]: I1013 13:25:41.474974 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 13 13:25:45 crc kubenswrapper[4684]: I1013 13:25:45.765958 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 13 13:25:45 crc kubenswrapper[4684]: I1013 13:25:45.766455 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 13 13:25:46 crc kubenswrapper[4684]: E1013 13:25:46.391947 4684 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/bc0192a28f598f2f8ac106dbd440577325bb14c359832d4470bd27327927a899/diff" to get inode usage: stat /var/lib/containers/storage/overlay/bc0192a28f598f2f8ac106dbd440577325bb14c359832d4470bd27327927a899/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_dnsmasq-dns-8dc864ccc-874nt_1779fa84-a997-4fa9-8c51-9fbb2949b8f7/dnsmasq-dns/0.log" to get inode usage: stat /var/log/pods/openstack_dnsmasq-dns-8dc864ccc-874nt_1779fa84-a997-4fa9-8c51-9fbb2949b8f7/dnsmasq-dns/0.log: no such file or directory Oct 13 13:25:46 crc kubenswrapper[4684]: I1013 13:25:46.474853 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 13 13:25:46 crc kubenswrapper[4684]: I1013 13:25:46.514528 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 13 13:25:46 crc kubenswrapper[4684]: I1013 13:25:46.849115 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="14e2d01c-4826-4aa5-b1c6-686374eee6fa" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 13 13:25:46 crc kubenswrapper[4684]: I1013 13:25:46.849127 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" 
podUID="14e2d01c-4826-4aa5-b1c6-686374eee6fa" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 13 13:25:46 crc kubenswrapper[4684]: I1013 13:25:46.990680 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 13 13:25:47 crc kubenswrapper[4684]: I1013 13:25:47.207891 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 13 13:25:50 crc kubenswrapper[4684]: I1013 13:25:50.449960 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 13 13:25:50 crc kubenswrapper[4684]: I1013 13:25:50.450277 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 13 13:25:50 crc kubenswrapper[4684]: I1013 13:25:50.463284 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 13 13:25:51 crc kubenswrapper[4684]: I1013 13:25:51.224858 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 13 13:25:54 crc kubenswrapper[4684]: E1013 13:25:54.231062 4684 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod939914d8_d67b_43c3_a0ee_16136fa489da.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59b6e322_6196_45af_ae09_a3028d9e2c84.slice/crio-conmon-cd7d1bec2d9ff579cddccf3738c4d635e346422c74a8ac48234a33fee4732edb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod939914d8_d67b_43c3_a0ee_16136fa489da.slice/crio-cc42d03d048ce0d1f96e472542f11cfd3de7584a15d17170d1a19b26cab90911\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59b6e322_6196_45af_ae09_a3028d9e2c84.slice/crio-cd7d1bec2d9ff579cddccf3738c4d635e346422c74a8ac48234a33fee4732edb.scope\": RecentStats: unable to find data in memory cache]" Oct 13 13:25:54 crc kubenswrapper[4684]: I1013 13:25:54.261336 4684 generic.go:334] "Generic (PLEG): container finished" podID="59b6e322-6196-45af-ae09-a3028d9e2c84" containerID="cd7d1bec2d9ff579cddccf3738c4d635e346422c74a8ac48234a33fee4732edb" exitCode=137 Oct 13 13:25:54 crc kubenswrapper[4684]: I1013 13:25:54.261400 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"59b6e322-6196-45af-ae09-a3028d9e2c84","Type":"ContainerDied","Data":"cd7d1bec2d9ff579cddccf3738c4d635e346422c74a8ac48234a33fee4732edb"} Oct 13 13:25:54 crc kubenswrapper[4684]: I1013 13:25:54.385151 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:54 crc kubenswrapper[4684]: I1013 13:25:54.467313 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59b6e322-6196-45af-ae09-a3028d9e2c84-combined-ca-bundle\") pod \"59b6e322-6196-45af-ae09-a3028d9e2c84\" (UID: \"59b6e322-6196-45af-ae09-a3028d9e2c84\") " Oct 13 13:25:54 crc kubenswrapper[4684]: I1013 13:25:54.467521 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grs8j\" (UniqueName: \"kubernetes.io/projected/59b6e322-6196-45af-ae09-a3028d9e2c84-kube-api-access-grs8j\") pod \"59b6e322-6196-45af-ae09-a3028d9e2c84\" (UID: \"59b6e322-6196-45af-ae09-a3028d9e2c84\") " Oct 13 13:25:54 crc kubenswrapper[4684]: I1013 13:25:54.467668 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59b6e322-6196-45af-ae09-a3028d9e2c84-config-data\") pod \"59b6e322-6196-45af-ae09-a3028d9e2c84\" (UID: \"59b6e322-6196-45af-ae09-a3028d9e2c84\") " Oct 13 13:25:54 crc kubenswrapper[4684]: I1013 13:25:54.473744 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59b6e322-6196-45af-ae09-a3028d9e2c84-kube-api-access-grs8j" (OuterVolumeSpecName: "kube-api-access-grs8j") pod "59b6e322-6196-45af-ae09-a3028d9e2c84" (UID: "59b6e322-6196-45af-ae09-a3028d9e2c84"). InnerVolumeSpecName "kube-api-access-grs8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:25:54 crc kubenswrapper[4684]: I1013 13:25:54.506521 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59b6e322-6196-45af-ae09-a3028d9e2c84-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "59b6e322-6196-45af-ae09-a3028d9e2c84" (UID: "59b6e322-6196-45af-ae09-a3028d9e2c84"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:25:54 crc kubenswrapper[4684]: I1013 13:25:54.507077 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59b6e322-6196-45af-ae09-a3028d9e2c84-config-data" (OuterVolumeSpecName: "config-data") pod "59b6e322-6196-45af-ae09-a3028d9e2c84" (UID: "59b6e322-6196-45af-ae09-a3028d9e2c84"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:25:54 crc kubenswrapper[4684]: I1013 13:25:54.569010 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59b6e322-6196-45af-ae09-a3028d9e2c84-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:54 crc kubenswrapper[4684]: I1013 13:25:54.569046 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59b6e322-6196-45af-ae09-a3028d9e2c84-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:54 crc kubenswrapper[4684]: I1013 13:25:54.569057 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grs8j\" (UniqueName: \"kubernetes.io/projected/59b6e322-6196-45af-ae09-a3028d9e2c84-kube-api-access-grs8j\") on node \"crc\" DevicePath \"\"" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.269857 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"59b6e322-6196-45af-ae09-a3028d9e2c84","Type":"ContainerDied","Data":"d2929fd94c978232a978eed35f339303ffba5dbff073419bf0d04b6ba0e66f11"} Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.270215 4684 scope.go:117] "RemoveContainer" containerID="cd7d1bec2d9ff579cddccf3738c4d635e346422c74a8ac48234a33fee4732edb" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.270006 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.304605 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.318058 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.336246 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 13 13:25:55 crc kubenswrapper[4684]: E1013 13:25:55.336673 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59b6e322-6196-45af-ae09-a3028d9e2c84" containerName="nova-cell1-novncproxy-novncproxy" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.336688 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="59b6e322-6196-45af-ae09-a3028d9e2c84" containerName="nova-cell1-novncproxy-novncproxy" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.336882 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="59b6e322-6196-45af-ae09-a3028d9e2c84" containerName="nova-cell1-novncproxy-novncproxy" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.337560 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.340141 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.340475 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.340657 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.357563 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.381736 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15cff776-4185-4994-b294-c4ba8e704bb1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"15cff776-4185-4994-b294-c4ba8e704bb1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.381788 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15cff776-4185-4994-b294-c4ba8e704bb1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"15cff776-4185-4994-b294-c4ba8e704bb1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.382137 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/15cff776-4185-4994-b294-c4ba8e704bb1-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"15cff776-4185-4994-b294-c4ba8e704bb1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.382228 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/15cff776-4185-4994-b294-c4ba8e704bb1-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"15cff776-4185-4994-b294-c4ba8e704bb1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.382251 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g8cv\" (UniqueName: \"kubernetes.io/projected/15cff776-4185-4994-b294-c4ba8e704bb1-kube-api-access-4g8cv\") pod \"nova-cell1-novncproxy-0\" (UID: \"15cff776-4185-4994-b294-c4ba8e704bb1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.488986 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/15cff776-4185-4994-b294-c4ba8e704bb1-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"15cff776-4185-4994-b294-c4ba8e704bb1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.489040 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/15cff776-4185-4994-b294-c4ba8e704bb1-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"15cff776-4185-4994-b294-c4ba8e704bb1\") " 
pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.489058 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g8cv\" (UniqueName: \"kubernetes.io/projected/15cff776-4185-4994-b294-c4ba8e704bb1-kube-api-access-4g8cv\") pod \"nova-cell1-novncproxy-0\" (UID: \"15cff776-4185-4994-b294-c4ba8e704bb1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.489145 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15cff776-4185-4994-b294-c4ba8e704bb1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"15cff776-4185-4994-b294-c4ba8e704bb1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.489164 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15cff776-4185-4994-b294-c4ba8e704bb1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"15cff776-4185-4994-b294-c4ba8e704bb1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.495744 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15cff776-4185-4994-b294-c4ba8e704bb1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"15cff776-4185-4994-b294-c4ba8e704bb1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.495952 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/15cff776-4185-4994-b294-c4ba8e704bb1-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"15cff776-4185-4994-b294-c4ba8e704bb1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.509521 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/15cff776-4185-4994-b294-c4ba8e704bb1-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"15cff776-4185-4994-b294-c4ba8e704bb1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.514785 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15cff776-4185-4994-b294-c4ba8e704bb1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"15cff776-4185-4994-b294-c4ba8e704bb1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.516487 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g8cv\" (UniqueName: \"kubernetes.io/projected/15cff776-4185-4994-b294-c4ba8e704bb1-kube-api-access-4g8cv\") pod \"nova-cell1-novncproxy-0\" (UID: \"15cff776-4185-4994-b294-c4ba8e704bb1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.659138 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.771376 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.771447 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.772374 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.772421 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.775056 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.777343 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.965400 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-54556fbfd5-lzq42"] Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.967225 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:55 crc kubenswrapper[4684]: I1013 13:25:55.980988 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54556fbfd5-lzq42"] Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.011226 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-config\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.011274 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-dns-swift-storage-0\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.011312 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krwk4\" (UniqueName: \"kubernetes.io/projected/1992e4de-4eef-4025-829e-aa304f4ee7da-kube-api-access-krwk4\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.011389 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-ovsdbserver-nb\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.011429 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-ovsdbserver-sb\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " 
pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.011453 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-dns-svc\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.112306 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-config\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.112353 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-dns-swift-storage-0\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.112380 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krwk4\" (UniqueName: \"kubernetes.io/projected/1992e4de-4eef-4025-829e-aa304f4ee7da-kube-api-access-krwk4\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.112425 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-ovsdbserver-nb\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.112451 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-ovsdbserver-sb\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.112471 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-dns-svc\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.113493 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-dns-svc\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.113661 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-config\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 
13:25:56.114220 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-ovsdbserver-nb\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.114306 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-dns-swift-storage-0\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.114750 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-ovsdbserver-sb\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.136653 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krwk4\" (UniqueName: \"kubernetes.io/projected/1992e4de-4eef-4025-829e-aa304f4ee7da-kube-api-access-krwk4\") pod \"dnsmasq-dns-54556fbfd5-lzq42\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.168015 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 13 13:25:56 crc kubenswrapper[4684]: W1013 13:25:56.170064 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15cff776_4185_4994_b294_c4ba8e704bb1.slice/crio-ee3072ffa959b21bd6b29eae62c073576e0c6a614b5906d756eb5076710a8513 WatchSource:0}: Error finding container ee3072ffa959b21bd6b29eae62c073576e0c6a614b5906d756eb5076710a8513: Status 404 returned error can't find the container with id ee3072ffa959b21bd6b29eae62c073576e0c6a614b5906d756eb5076710a8513 Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.281711 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"15cff776-4185-4994-b294-c4ba8e704bb1","Type":"ContainerStarted","Data":"ee3072ffa959b21bd6b29eae62c073576e0c6a614b5906d756eb5076710a8513"} Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.320435 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.360170 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59b6e322-6196-45af-ae09-a3028d9e2c84" path="/var/lib/kubelet/pods/59b6e322-6196-45af-ae09-a3028d9e2c84/volumes" Oct 13 13:25:56 crc kubenswrapper[4684]: I1013 13:25:56.820990 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54556fbfd5-lzq42"] Oct 13 13:25:56 crc kubenswrapper[4684]: W1013 13:25:56.845416 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1992e4de_4eef_4025_829e_aa304f4ee7da.slice/crio-0242dcbe1f096536a50116af9e29882e5252bf50204771471e4e50dd71d2fd27 WatchSource:0}: Error finding container 0242dcbe1f096536a50116af9e29882e5252bf50204771471e4e50dd71d2fd27: Status 404 returned error can't find the container with id 0242dcbe1f096536a50116af9e29882e5252bf50204771471e4e50dd71d2fd27 Oct 13 13:25:57 crc kubenswrapper[4684]: I1013 13:25:57.292324 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"15cff776-4185-4994-b294-c4ba8e704bb1","Type":"ContainerStarted","Data":"37cad3d584557c54ea2d52936a6a51cfe0537ca92412b9f49bcdff5a692125c3"} Oct 13 13:25:57 crc kubenswrapper[4684]: I1013 13:25:57.294303 4684 generic.go:334] "Generic (PLEG): container finished" podID="1992e4de-4eef-4025-829e-aa304f4ee7da" containerID="e2e44e9167e679e2e20e58bd3a28bab4ba19843bf331f8cf4290108cb9824184" exitCode=0 Oct 13 13:25:57 crc kubenswrapper[4684]: I1013 13:25:57.294385 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" event={"ID":"1992e4de-4eef-4025-829e-aa304f4ee7da","Type":"ContainerDied","Data":"e2e44e9167e679e2e20e58bd3a28bab4ba19843bf331f8cf4290108cb9824184"} Oct 13 13:25:57 crc kubenswrapper[4684]: I1013 13:25:57.294413 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" event={"ID":"1992e4de-4eef-4025-829e-aa304f4ee7da","Type":"ContainerStarted","Data":"0242dcbe1f096536a50116af9e29882e5252bf50204771471e4e50dd71d2fd27"} Oct 13 13:25:57 crc kubenswrapper[4684]: I1013 13:25:57.318459 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.3184387 podStartE2EDuration="2.3184387s" podCreationTimestamp="2025-10-13 13:25:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:25:57.312289327 +0000 UTC m=+1111.879673397" watchObservedRunningTime="2025-10-13 13:25:57.3184387 +0000 UTC m=+1111.885822770" Oct 13 13:25:58 crc kubenswrapper[4684]: I1013 13:25:58.102802 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:25:58 crc kubenswrapper[4684]: I1013 13:25:58.103338 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerName="ceilometer-central-agent" containerID="cri-o://92593e0fbed6edfc48873e651555fe775db3c23499bbabd991158ba4240eb215" gracePeriod=30 Oct 13 13:25:58 crc kubenswrapper[4684]: I1013 13:25:58.103459 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerName="ceilometer-notification-agent" 
containerID="cri-o://807330ea973ae4dd65a81fbc8bbd9642dcc0273d2c27907b8324c071756cd1db" gracePeriod=30 Oct 13 13:25:58 crc kubenswrapper[4684]: I1013 13:25:58.103449 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerName="proxy-httpd" containerID="cri-o://ab1a1753ec7c0f82becb2c47c3e7dfc5f982580c1fcb2f99ab8e6c8e0ca0d393" gracePeriod=30 Oct 13 13:25:58 crc kubenswrapper[4684]: I1013 13:25:58.103466 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerName="sg-core" containerID="cri-o://e37e0c33d7df083b84dc79703516dd3186fe87c6f2bc8e15b9bcb9f5e9f43894" gracePeriod=30 Oct 13 13:25:58 crc kubenswrapper[4684]: I1013 13:25:58.308963 4684 generic.go:334] "Generic (PLEG): container finished" podID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerID="e37e0c33d7df083b84dc79703516dd3186fe87c6f2bc8e15b9bcb9f5e9f43894" exitCode=2 Oct 13 13:25:58 crc kubenswrapper[4684]: I1013 13:25:58.309033 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92e6f67a-af44-43a1-bbec-ded11be19f59","Type":"ContainerDied","Data":"e37e0c33d7df083b84dc79703516dd3186fe87c6f2bc8e15b9bcb9f5e9f43894"} Oct 13 13:25:58 crc kubenswrapper[4684]: I1013 13:25:58.312817 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" event={"ID":"1992e4de-4eef-4025-829e-aa304f4ee7da","Type":"ContainerStarted","Data":"c07573ccd7517763d40b19ea3371788af0f18efc17f8ad3997ce371ec35f1d9b"} Oct 13 13:25:58 crc kubenswrapper[4684]: I1013 13:25:58.351399 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" podStartSLOduration=3.35137798 podStartE2EDuration="3.35137798s" podCreationTimestamp="2025-10-13 13:25:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:25:58.349330126 +0000 UTC m=+1112.916714196" watchObservedRunningTime="2025-10-13 13:25:58.35137798 +0000 UTC m=+1112.918762050" Oct 13 13:25:58 crc kubenswrapper[4684]: I1013 13:25:58.681143 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 13 13:25:58 crc kubenswrapper[4684]: I1013 13:25:58.681653 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="14e2d01c-4826-4aa5-b1c6-686374eee6fa" containerName="nova-api-log" containerID="cri-o://1767f53ce477058a2ef03b08d77c2a823576dfd5422e622a06dd9bc912593692" gracePeriod=30 Oct 13 13:25:58 crc kubenswrapper[4684]: I1013 13:25:58.681738 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="14e2d01c-4826-4aa5-b1c6-686374eee6fa" containerName="nova-api-api" containerID="cri-o://76a04656f1494eb92385266a2437d39e134f214cab20420e26f65061fab92cc5" gracePeriod=30 Oct 13 13:25:59 crc kubenswrapper[4684]: I1013 13:25:59.323373 4684 generic.go:334] "Generic (PLEG): container finished" podID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerID="ab1a1753ec7c0f82becb2c47c3e7dfc5f982580c1fcb2f99ab8e6c8e0ca0d393" exitCode=0 Oct 13 13:25:59 crc kubenswrapper[4684]: I1013 13:25:59.323422 4684 generic.go:334] "Generic (PLEG): container finished" podID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerID="92593e0fbed6edfc48873e651555fe775db3c23499bbabd991158ba4240eb215" 
exitCode=0 Oct 13 13:25:59 crc kubenswrapper[4684]: I1013 13:25:59.323454 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92e6f67a-af44-43a1-bbec-ded11be19f59","Type":"ContainerDied","Data":"ab1a1753ec7c0f82becb2c47c3e7dfc5f982580c1fcb2f99ab8e6c8e0ca0d393"} Oct 13 13:25:59 crc kubenswrapper[4684]: I1013 13:25:59.323493 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92e6f67a-af44-43a1-bbec-ded11be19f59","Type":"ContainerDied","Data":"92593e0fbed6edfc48873e651555fe775db3c23499bbabd991158ba4240eb215"} Oct 13 13:25:59 crc kubenswrapper[4684]: I1013 13:25:59.325889 4684 generic.go:334] "Generic (PLEG): container finished" podID="14e2d01c-4826-4aa5-b1c6-686374eee6fa" containerID="1767f53ce477058a2ef03b08d77c2a823576dfd5422e622a06dd9bc912593692" exitCode=143 Oct 13 13:25:59 crc kubenswrapper[4684]: I1013 13:25:59.325924 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14e2d01c-4826-4aa5-b1c6-686374eee6fa","Type":"ContainerDied","Data":"1767f53ce477058a2ef03b08d77c2a823576dfd5422e622a06dd9bc912593692"} Oct 13 13:25:59 crc kubenswrapper[4684]: I1013 13:25:59.326175 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:26:00 crc kubenswrapper[4684]: I1013 13:26:00.559788 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:26:00 crc kubenswrapper[4684]: I1013 13:26:00.560128 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:26:00 crc kubenswrapper[4684]: I1013 13:26:00.660171 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.358085 4684 generic.go:334] "Generic (PLEG): container finished" podID="14e2d01c-4826-4aa5-b1c6-686374eee6fa" containerID="76a04656f1494eb92385266a2437d39e134f214cab20420e26f65061fab92cc5" exitCode=0 Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.358881 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.361413 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14e2d01c-4826-4aa5-b1c6-686374eee6fa","Type":"ContainerDied","Data":"76a04656f1494eb92385266a2437d39e134f214cab20420e26f65061fab92cc5"} Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.361453 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"14e2d01c-4826-4aa5-b1c6-686374eee6fa","Type":"ContainerDied","Data":"8f5a5fc6fb247c346760bffec705f87d8c8cdefee4bac737ec7b0b056398d7db"} Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.361476 4684 scope.go:117] "RemoveContainer" containerID="76a04656f1494eb92385266a2437d39e134f214cab20420e26f65061fab92cc5" Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.394008 4684 scope.go:117] "RemoveContainer" containerID="1767f53ce477058a2ef03b08d77c2a823576dfd5422e622a06dd9bc912593692" Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.416322 4684 scope.go:117] "RemoveContainer" containerID="76a04656f1494eb92385266a2437d39e134f214cab20420e26f65061fab92cc5" Oct 13 13:26:02 crc kubenswrapper[4684]: E1013 13:26:02.417146 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76a04656f1494eb92385266a2437d39e134f214cab20420e26f65061fab92cc5\": container with ID starting with 76a04656f1494eb92385266a2437d39e134f214cab20420e26f65061fab92cc5 not found: ID does not exist" containerID="76a04656f1494eb92385266a2437d39e134f214cab20420e26f65061fab92cc5" Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.417184 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76a04656f1494eb92385266a2437d39e134f214cab20420e26f65061fab92cc5"} err="failed to get container status \"76a04656f1494eb92385266a2437d39e134f214cab20420e26f65061fab92cc5\": rpc error: code = NotFound desc = could not find container \"76a04656f1494eb92385266a2437d39e134f214cab20420e26f65061fab92cc5\": container with ID starting with 76a04656f1494eb92385266a2437d39e134f214cab20420e26f65061fab92cc5 not found: ID does not exist" Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.417209 4684 scope.go:117] "RemoveContainer" containerID="1767f53ce477058a2ef03b08d77c2a823576dfd5422e622a06dd9bc912593692" Oct 13 13:26:02 crc kubenswrapper[4684]: E1013 13:26:02.421429 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1767f53ce477058a2ef03b08d77c2a823576dfd5422e622a06dd9bc912593692\": container with ID starting with 1767f53ce477058a2ef03b08d77c2a823576dfd5422e622a06dd9bc912593692 not found: ID does not exist" containerID="1767f53ce477058a2ef03b08d77c2a823576dfd5422e622a06dd9bc912593692" Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.421479 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1767f53ce477058a2ef03b08d77c2a823576dfd5422e622a06dd9bc912593692"} err="failed to get container status \"1767f53ce477058a2ef03b08d77c2a823576dfd5422e622a06dd9bc912593692\": rpc error: code = NotFound desc = could not find container \"1767f53ce477058a2ef03b08d77c2a823576dfd5422e622a06dd9bc912593692\": container with ID starting with 1767f53ce477058a2ef03b08d77c2a823576dfd5422e622a06dd9bc912593692 not found: ID does not exist" Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.435660 4684 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e2d01c-4826-4aa5-b1c6-686374eee6fa-combined-ca-bundle\") pod \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\" (UID: \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\") " Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.435737 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14e2d01c-4826-4aa5-b1c6-686374eee6fa-config-data\") pod \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\" (UID: \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\") " Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.435957 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kw5f9\" (UniqueName: \"kubernetes.io/projected/14e2d01c-4826-4aa5-b1c6-686374eee6fa-kube-api-access-kw5f9\") pod \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\" (UID: \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\") " Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.436082 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14e2d01c-4826-4aa5-b1c6-686374eee6fa-logs\") pod \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\" (UID: \"14e2d01c-4826-4aa5-b1c6-686374eee6fa\") " Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.436488 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14e2d01c-4826-4aa5-b1c6-686374eee6fa-logs" (OuterVolumeSpecName: "logs") pod "14e2d01c-4826-4aa5-b1c6-686374eee6fa" (UID: "14e2d01c-4826-4aa5-b1c6-686374eee6fa"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.436825 4684 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14e2d01c-4826-4aa5-b1c6-686374eee6fa-logs\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.441728 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14e2d01c-4826-4aa5-b1c6-686374eee6fa-kube-api-access-kw5f9" (OuterVolumeSpecName: "kube-api-access-kw5f9") pod "14e2d01c-4826-4aa5-b1c6-686374eee6fa" (UID: "14e2d01c-4826-4aa5-b1c6-686374eee6fa"). InnerVolumeSpecName "kube-api-access-kw5f9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.472862 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e2d01c-4826-4aa5-b1c6-686374eee6fa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14e2d01c-4826-4aa5-b1c6-686374eee6fa" (UID: "14e2d01c-4826-4aa5-b1c6-686374eee6fa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.478158 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e2d01c-4826-4aa5-b1c6-686374eee6fa-config-data" (OuterVolumeSpecName: "config-data") pod "14e2d01c-4826-4aa5-b1c6-686374eee6fa" (UID: "14e2d01c-4826-4aa5-b1c6-686374eee6fa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.538126 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e2d01c-4826-4aa5-b1c6-686374eee6fa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.538153 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14e2d01c-4826-4aa5-b1c6-686374eee6fa-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:02 crc kubenswrapper[4684]: I1013 13:26:02.538162 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kw5f9\" (UniqueName: \"kubernetes.io/projected/14e2d01c-4826-4aa5-b1c6-686374eee6fa-kube-api-access-kw5f9\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.095202 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.149476 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92e6f67a-af44-43a1-bbec-ded11be19f59-log-httpd\") pod \"92e6f67a-af44-43a1-bbec-ded11be19f59\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.149671 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrjsc\" (UniqueName: \"kubernetes.io/projected/92e6f67a-af44-43a1-bbec-ded11be19f59-kube-api-access-zrjsc\") pod \"92e6f67a-af44-43a1-bbec-ded11be19f59\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.149718 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92e6f67a-af44-43a1-bbec-ded11be19f59-run-httpd\") pod \"92e6f67a-af44-43a1-bbec-ded11be19f59\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.149751 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-config-data\") pod \"92e6f67a-af44-43a1-bbec-ded11be19f59\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.149788 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-scripts\") pod \"92e6f67a-af44-43a1-bbec-ded11be19f59\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.149889 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-ceilometer-tls-certs\") pod \"92e6f67a-af44-43a1-bbec-ded11be19f59\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.149944 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-sg-core-conf-yaml\") pod \"92e6f67a-af44-43a1-bbec-ded11be19f59\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 
13:26:03.150015 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-combined-ca-bundle\") pod \"92e6f67a-af44-43a1-bbec-ded11be19f59\" (UID: \"92e6f67a-af44-43a1-bbec-ded11be19f59\") " Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.150203 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92e6f67a-af44-43a1-bbec-ded11be19f59-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "92e6f67a-af44-43a1-bbec-ded11be19f59" (UID: "92e6f67a-af44-43a1-bbec-ded11be19f59"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.150235 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92e6f67a-af44-43a1-bbec-ded11be19f59-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "92e6f67a-af44-43a1-bbec-ded11be19f59" (UID: "92e6f67a-af44-43a1-bbec-ded11be19f59"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.150802 4684 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92e6f67a-af44-43a1-bbec-ded11be19f59-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.150823 4684 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92e6f67a-af44-43a1-bbec-ded11be19f59-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.167218 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-scripts" (OuterVolumeSpecName: "scripts") pod "92e6f67a-af44-43a1-bbec-ded11be19f59" (UID: "92e6f67a-af44-43a1-bbec-ded11be19f59"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.168114 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92e6f67a-af44-43a1-bbec-ded11be19f59-kube-api-access-zrjsc" (OuterVolumeSpecName: "kube-api-access-zrjsc") pod "92e6f67a-af44-43a1-bbec-ded11be19f59" (UID: "92e6f67a-af44-43a1-bbec-ded11be19f59"). InnerVolumeSpecName "kube-api-access-zrjsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.214955 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "92e6f67a-af44-43a1-bbec-ded11be19f59" (UID: "92e6f67a-af44-43a1-bbec-ded11be19f59"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.227951 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "92e6f67a-af44-43a1-bbec-ded11be19f59" (UID: "92e6f67a-af44-43a1-bbec-ded11be19f59"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.254197 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrjsc\" (UniqueName: \"kubernetes.io/projected/92e6f67a-af44-43a1-bbec-ded11be19f59-kube-api-access-zrjsc\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.254232 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.254241 4684 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.254250 4684 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.291381 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92e6f67a-af44-43a1-bbec-ded11be19f59" (UID: "92e6f67a-af44-43a1-bbec-ded11be19f59"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.293498 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-config-data" (OuterVolumeSpecName: "config-data") pod "92e6f67a-af44-43a1-bbec-ded11be19f59" (UID: "92e6f67a-af44-43a1-bbec-ded11be19f59"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.356411 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.356452 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92e6f67a-af44-43a1-bbec-ded11be19f59-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.368603 4684 generic.go:334] "Generic (PLEG): container finished" podID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerID="807330ea973ae4dd65a81fbc8bbd9642dcc0273d2c27907b8324c071756cd1db" exitCode=0 Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.368665 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.368703 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92e6f67a-af44-43a1-bbec-ded11be19f59","Type":"ContainerDied","Data":"807330ea973ae4dd65a81fbc8bbd9642dcc0273d2c27907b8324c071756cd1db"} Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.368757 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"92e6f67a-af44-43a1-bbec-ded11be19f59","Type":"ContainerDied","Data":"47fce64c23be447b6fb3e27de23e55929f376f21977305d435aedb77f4fac784"} Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.368778 4684 scope.go:117] "RemoveContainer" containerID="ab1a1753ec7c0f82becb2c47c3e7dfc5f982580c1fcb2f99ab8e6c8e0ca0d393" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.370228 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.404763 4684 scope.go:117] "RemoveContainer" containerID="e37e0c33d7df083b84dc79703516dd3186fe87c6f2bc8e15b9bcb9f5e9f43894" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.412370 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.426419 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.436742 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.440237 4684 scope.go:117] "RemoveContainer" containerID="807330ea973ae4dd65a81fbc8bbd9642dcc0273d2c27907b8324c071756cd1db" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.450380 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.460143 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:26:03 crc kubenswrapper[4684]: E1013 13:26:03.461118 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerName="proxy-httpd" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.461243 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerName="proxy-httpd" Oct 13 13:26:03 crc kubenswrapper[4684]: E1013 13:26:03.461335 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerName="ceilometer-notification-agent" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.461409 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerName="ceilometer-notification-agent" Oct 13 13:26:03 crc kubenswrapper[4684]: E1013 13:26:03.461501 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14e2d01c-4826-4aa5-b1c6-686374eee6fa" containerName="nova-api-api" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.461580 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="14e2d01c-4826-4aa5-b1c6-686374eee6fa" containerName="nova-api-api" Oct 13 13:26:03 crc kubenswrapper[4684]: E1013 13:26:03.461671 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerName="sg-core" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.471809 
4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerName="sg-core" Oct 13 13:26:03 crc kubenswrapper[4684]: E1013 13:26:03.472100 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerName="ceilometer-central-agent" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.472188 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerName="ceilometer-central-agent" Oct 13 13:26:03 crc kubenswrapper[4684]: E1013 13:26:03.472299 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14e2d01c-4826-4aa5-b1c6-686374eee6fa" containerName="nova-api-log" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.472385 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="14e2d01c-4826-4aa5-b1c6-686374eee6fa" containerName="nova-api-log" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.472873 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="14e2d01c-4826-4aa5-b1c6-686374eee6fa" containerName="nova-api-log" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.473006 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerName="ceilometer-central-agent" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.473102 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerName="proxy-httpd" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.474113 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="14e2d01c-4826-4aa5-b1c6-686374eee6fa" containerName="nova-api-api" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.474216 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerName="sg-core" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.474307 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" containerName="ceilometer-notification-agent" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.477065 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.485812 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.487567 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.488451 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.488532 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.492561 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.492704 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.492944 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.494108 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.497511 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.516274 4684 scope.go:117] "RemoveContainer" containerID="92593e0fbed6edfc48873e651555fe775db3c23499bbabd991158ba4240eb215" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.557200 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.560887 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.560993 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.561018 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-config-data\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.561140 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-config-data\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.561168 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/71041122-46fe-4fe2-8943-9100f8c228b0-logs\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.561214 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-log-httpd\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.561237 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-run-httpd\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.561258 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.561289 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.561310 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-scripts\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.561346 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79qf6\" (UniqueName: \"kubernetes.io/projected/71041122-46fe-4fe2-8943-9100f8c228b0-kube-api-access-79qf6\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.561370 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-public-tls-certs\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.561397 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.561428 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9x9d\" (UniqueName: \"kubernetes.io/projected/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-kube-api-access-c9x9d\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.568988 4684 scope.go:117] "RemoveContainer" containerID="ab1a1753ec7c0f82becb2c47c3e7dfc5f982580c1fcb2f99ab8e6c8e0ca0d393" Oct 13 13:26:03 crc kubenswrapper[4684]: E1013 13:26:03.569704 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"ab1a1753ec7c0f82becb2c47c3e7dfc5f982580c1fcb2f99ab8e6c8e0ca0d393\": container with ID starting with ab1a1753ec7c0f82becb2c47c3e7dfc5f982580c1fcb2f99ab8e6c8e0ca0d393 not found: ID does not exist" containerID="ab1a1753ec7c0f82becb2c47c3e7dfc5f982580c1fcb2f99ab8e6c8e0ca0d393" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.569766 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab1a1753ec7c0f82becb2c47c3e7dfc5f982580c1fcb2f99ab8e6c8e0ca0d393"} err="failed to get container status \"ab1a1753ec7c0f82becb2c47c3e7dfc5f982580c1fcb2f99ab8e6c8e0ca0d393\": rpc error: code = NotFound desc = could not find container \"ab1a1753ec7c0f82becb2c47c3e7dfc5f982580c1fcb2f99ab8e6c8e0ca0d393\": container with ID starting with ab1a1753ec7c0f82becb2c47c3e7dfc5f982580c1fcb2f99ab8e6c8e0ca0d393 not found: ID does not exist" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.569806 4684 scope.go:117] "RemoveContainer" containerID="e37e0c33d7df083b84dc79703516dd3186fe87c6f2bc8e15b9bcb9f5e9f43894" Oct 13 13:26:03 crc kubenswrapper[4684]: E1013 13:26:03.570355 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e37e0c33d7df083b84dc79703516dd3186fe87c6f2bc8e15b9bcb9f5e9f43894\": container with ID starting with e37e0c33d7df083b84dc79703516dd3186fe87c6f2bc8e15b9bcb9f5e9f43894 not found: ID does not exist" containerID="e37e0c33d7df083b84dc79703516dd3186fe87c6f2bc8e15b9bcb9f5e9f43894" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.570382 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e37e0c33d7df083b84dc79703516dd3186fe87c6f2bc8e15b9bcb9f5e9f43894"} err="failed to get container status \"e37e0c33d7df083b84dc79703516dd3186fe87c6f2bc8e15b9bcb9f5e9f43894\": rpc error: code = NotFound desc = could not find container \"e37e0c33d7df083b84dc79703516dd3186fe87c6f2bc8e15b9bcb9f5e9f43894\": container with ID starting with e37e0c33d7df083b84dc79703516dd3186fe87c6f2bc8e15b9bcb9f5e9f43894 not found: ID does not exist" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.570394 4684 scope.go:117] "RemoveContainer" containerID="807330ea973ae4dd65a81fbc8bbd9642dcc0273d2c27907b8324c071756cd1db" Oct 13 13:26:03 crc kubenswrapper[4684]: E1013 13:26:03.570625 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"807330ea973ae4dd65a81fbc8bbd9642dcc0273d2c27907b8324c071756cd1db\": container with ID starting with 807330ea973ae4dd65a81fbc8bbd9642dcc0273d2c27907b8324c071756cd1db not found: ID does not exist" containerID="807330ea973ae4dd65a81fbc8bbd9642dcc0273d2c27907b8324c071756cd1db" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.570643 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"807330ea973ae4dd65a81fbc8bbd9642dcc0273d2c27907b8324c071756cd1db"} err="failed to get container status \"807330ea973ae4dd65a81fbc8bbd9642dcc0273d2c27907b8324c071756cd1db\": rpc error: code = NotFound desc = could not find container \"807330ea973ae4dd65a81fbc8bbd9642dcc0273d2c27907b8324c071756cd1db\": container with ID starting with 807330ea973ae4dd65a81fbc8bbd9642dcc0273d2c27907b8324c071756cd1db not found: ID does not exist" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.570657 4684 scope.go:117] "RemoveContainer" containerID="92593e0fbed6edfc48873e651555fe775db3c23499bbabd991158ba4240eb215" Oct 13 13:26:03 crc 
kubenswrapper[4684]: E1013 13:26:03.571507 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92593e0fbed6edfc48873e651555fe775db3c23499bbabd991158ba4240eb215\": container with ID starting with 92593e0fbed6edfc48873e651555fe775db3c23499bbabd991158ba4240eb215 not found: ID does not exist" containerID="92593e0fbed6edfc48873e651555fe775db3c23499bbabd991158ba4240eb215" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.571523 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92593e0fbed6edfc48873e651555fe775db3c23499bbabd991158ba4240eb215"} err="failed to get container status \"92593e0fbed6edfc48873e651555fe775db3c23499bbabd991158ba4240eb215\": rpc error: code = NotFound desc = could not find container \"92593e0fbed6edfc48873e651555fe775db3c23499bbabd991158ba4240eb215\": container with ID starting with 92593e0fbed6edfc48873e651555fe775db3c23499bbabd991158ba4240eb215 not found: ID does not exist" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.663054 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-config-data\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.663104 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/71041122-46fe-4fe2-8943-9100f8c228b0-logs\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.663129 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-log-httpd\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.663143 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-run-httpd\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.663164 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.663188 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.663203 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-scripts\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.663232 4684 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79qf6\" (UniqueName: \"kubernetes.io/projected/71041122-46fe-4fe2-8943-9100f8c228b0-kube-api-access-79qf6\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.663253 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-public-tls-certs\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.663276 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.663297 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9x9d\" (UniqueName: \"kubernetes.io/projected/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-kube-api-access-c9x9d\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.663347 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.663373 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.663387 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-config-data\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.664295 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-log-httpd\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.664348 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-run-httpd\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.664605 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/71041122-46fe-4fe2-8943-9100f8c228b0-logs\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.666950 4684 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-config-data\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.666959 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-scripts\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.667255 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.667793 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-config-data\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.668997 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.669185 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.673374 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.674360 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.676066 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-public-tls-certs\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.684468 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9x9d\" (UniqueName: \"kubernetes.io/projected/ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6-kube-api-access-c9x9d\") pod \"ceilometer-0\" (UID: \"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6\") " pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.688366 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79qf6\" 
(UniqueName: \"kubernetes.io/projected/71041122-46fe-4fe2-8943-9100f8c228b0-kube-api-access-79qf6\") pod \"nova-api-0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") " pod="openstack/nova-api-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.817868 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 13:26:03 crc kubenswrapper[4684]: I1013 13:26:03.837857 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 13 13:26:04 crc kubenswrapper[4684]: I1013 13:26:04.322889 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 13:26:04 crc kubenswrapper[4684]: I1013 13:26:04.363852 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14e2d01c-4826-4aa5-b1c6-686374eee6fa" path="/var/lib/kubelet/pods/14e2d01c-4826-4aa5-b1c6-686374eee6fa/volumes" Oct 13 13:26:04 crc kubenswrapper[4684]: I1013 13:26:04.364927 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92e6f67a-af44-43a1-bbec-ded11be19f59" path="/var/lib/kubelet/pods/92e6f67a-af44-43a1-bbec-ded11be19f59/volumes" Oct 13 13:26:04 crc kubenswrapper[4684]: I1013 13:26:04.389257 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6","Type":"ContainerStarted","Data":"bbdd2d9e68afc3fb0b8cbd4114249d2e311e233bd8875cc3695f73ce5dfba998"} Oct 13 13:26:04 crc kubenswrapper[4684]: W1013 13:26:04.398564 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71041122_46fe_4fe2_8943_9100f8c228b0.slice/crio-e4619b3160ed359e3b63b00f7b7b24007faaf4fcf463fd20f6d285efd0c99a7d WatchSource:0}: Error finding container e4619b3160ed359e3b63b00f7b7b24007faaf4fcf463fd20f6d285efd0c99a7d: Status 404 returned error can't find the container with id e4619b3160ed359e3b63b00f7b7b24007faaf4fcf463fd20f6d285efd0c99a7d Oct 13 13:26:04 crc kubenswrapper[4684]: I1013 13:26:04.401803 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 13 13:26:05 crc kubenswrapper[4684]: I1013 13:26:05.407759 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"71041122-46fe-4fe2-8943-9100f8c228b0","Type":"ContainerStarted","Data":"1740304713d5cab0bdcba6581277e0fe1e35d39df169813ff8b72f7514667fae"} Oct 13 13:26:05 crc kubenswrapper[4684]: I1013 13:26:05.408253 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"71041122-46fe-4fe2-8943-9100f8c228b0","Type":"ContainerStarted","Data":"df257588ec68e7ea8606ce916531e3767bde0cc0acfd0c3311e53177dd1a751a"} Oct 13 13:26:05 crc kubenswrapper[4684]: I1013 13:26:05.408268 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"71041122-46fe-4fe2-8943-9100f8c228b0","Type":"ContainerStarted","Data":"e4619b3160ed359e3b63b00f7b7b24007faaf4fcf463fd20f6d285efd0c99a7d"} Oct 13 13:26:05 crc kubenswrapper[4684]: I1013 13:26:05.410965 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6","Type":"ContainerStarted","Data":"92d597d6ad76a02dbc68a33ce7127b20f30fccf83c5710fc120190edad724db7"} Oct 13 13:26:05 crc kubenswrapper[4684]: I1013 13:26:05.411005 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6","Type":"ContainerStarted","Data":"b6786cc03f50660935ddb5fbd05b0cebd8ca0300c17e008a7148b59db659c8fc"} Oct 13 13:26:05 crc kubenswrapper[4684]: I1013 13:26:05.429269 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.429252946 podStartE2EDuration="2.429252946s" podCreationTimestamp="2025-10-13 13:26:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:26:05.426295493 +0000 UTC m=+1119.993679563" watchObservedRunningTime="2025-10-13 13:26:05.429252946 +0000 UTC m=+1119.996637016" Oct 13 13:26:05 crc kubenswrapper[4684]: I1013 13:26:05.659744 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:26:05 crc kubenswrapper[4684]: I1013 13:26:05.693053 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.322063 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.413426 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68845c6b95-pcn55"] Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.413756 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-68845c6b95-pcn55" podUID="e6a99f03-7caa-4fa4-856a-26d2ec9a5d19" containerName="dnsmasq-dns" containerID="cri-o://aeccab7e06f84dacaf38b38d3219716b91cb27230d4bae65bf34d8ffa145a672" gracePeriod=10 Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.451669 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6","Type":"ContainerStarted","Data":"b85a52199a9e4a0dbae9e642410d43ab2e49af213576b0d7fc32589838336b80"} Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.474776 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.721918 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-dz77b"] Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.726434 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dz77b" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.733193 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-dz77b"] Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.737827 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.737845 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.834034 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w888m\" (UniqueName: \"kubernetes.io/projected/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-kube-api-access-w888m\") pod \"nova-cell1-cell-mapping-dz77b\" (UID: \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\") " pod="openstack/nova-cell1-cell-mapping-dz77b" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.834132 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-scripts\") pod \"nova-cell1-cell-mapping-dz77b\" (UID: \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\") " pod="openstack/nova-cell1-cell-mapping-dz77b" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.834172 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dz77b\" (UID: \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\") " pod="openstack/nova-cell1-cell-mapping-dz77b" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.834221 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-config-data\") pod \"nova-cell1-cell-mapping-dz77b\" (UID: \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\") " pod="openstack/nova-cell1-cell-mapping-dz77b" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.937009 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-scripts\") pod \"nova-cell1-cell-mapping-dz77b\" (UID: \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\") " pod="openstack/nova-cell1-cell-mapping-dz77b" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.937058 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dz77b\" (UID: \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\") " pod="openstack/nova-cell1-cell-mapping-dz77b" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.937100 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-config-data\") pod \"nova-cell1-cell-mapping-dz77b\" (UID: \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\") " pod="openstack/nova-cell1-cell-mapping-dz77b" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.937185 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w888m\" (UniqueName: 
\"kubernetes.io/projected/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-kube-api-access-w888m\") pod \"nova-cell1-cell-mapping-dz77b\" (UID: \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\") " pod="openstack/nova-cell1-cell-mapping-dz77b" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.947066 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-scripts\") pod \"nova-cell1-cell-mapping-dz77b\" (UID: \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\") " pod="openstack/nova-cell1-cell-mapping-dz77b" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.947097 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dz77b\" (UID: \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\") " pod="openstack/nova-cell1-cell-mapping-dz77b" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.955358 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w888m\" (UniqueName: \"kubernetes.io/projected/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-kube-api-access-w888m\") pod \"nova-cell1-cell-mapping-dz77b\" (UID: \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\") " pod="openstack/nova-cell1-cell-mapping-dz77b" Oct 13 13:26:06 crc kubenswrapper[4684]: I1013 13:26:06.972082 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-config-data\") pod \"nova-cell1-cell-mapping-dz77b\" (UID: \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\") " pod="openstack/nova-cell1-cell-mapping-dz77b" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.040341 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.049385 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dz77b" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.142752 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-dns-swift-storage-0\") pod \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.142834 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-config\") pod \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.142894 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-dns-svc\") pod \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.143589 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-ovsdbserver-sb\") pod \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.143667 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfm9h\" (UniqueName: \"kubernetes.io/projected/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-kube-api-access-vfm9h\") pod \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.143767 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-ovsdbserver-nb\") pod \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\" (UID: \"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19\") " Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.148970 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-kube-api-access-vfm9h" (OuterVolumeSpecName: "kube-api-access-vfm9h") pod "e6a99f03-7caa-4fa4-856a-26d2ec9a5d19" (UID: "e6a99f03-7caa-4fa4-856a-26d2ec9a5d19"). InnerVolumeSpecName "kube-api-access-vfm9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.199299 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e6a99f03-7caa-4fa4-856a-26d2ec9a5d19" (UID: "e6a99f03-7caa-4fa4-856a-26d2ec9a5d19"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.217344 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e6a99f03-7caa-4fa4-856a-26d2ec9a5d19" (UID: "e6a99f03-7caa-4fa4-856a-26d2ec9a5d19"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.224293 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e6a99f03-7caa-4fa4-856a-26d2ec9a5d19" (UID: "e6a99f03-7caa-4fa4-856a-26d2ec9a5d19"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.231161 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e6a99f03-7caa-4fa4-856a-26d2ec9a5d19" (UID: "e6a99f03-7caa-4fa4-856a-26d2ec9a5d19"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.238659 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-config" (OuterVolumeSpecName: "config") pod "e6a99f03-7caa-4fa4-856a-26d2ec9a5d19" (UID: "e6a99f03-7caa-4fa4-856a-26d2ec9a5d19"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.247301 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.247335 4684 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.247347 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.247358 4684 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.247369 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.247377 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfm9h\" (UniqueName: \"kubernetes.io/projected/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19-kube-api-access-vfm9h\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.461695 4684 generic.go:334] "Generic (PLEG): container finished" podID="e6a99f03-7caa-4fa4-856a-26d2ec9a5d19" containerID="aeccab7e06f84dacaf38b38d3219716b91cb27230d4bae65bf34d8ffa145a672" exitCode=0 Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.461775 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68845c6b95-pcn55" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.461822 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68845c6b95-pcn55" event={"ID":"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19","Type":"ContainerDied","Data":"aeccab7e06f84dacaf38b38d3219716b91cb27230d4bae65bf34d8ffa145a672"} Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.463303 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68845c6b95-pcn55" event={"ID":"e6a99f03-7caa-4fa4-856a-26d2ec9a5d19","Type":"ContainerDied","Data":"801edcbbbb1296d857d41105fac35b6e187f5ae6b517090e7c39799b1e3f5ca4"} Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.463400 4684 scope.go:117] "RemoveContainer" containerID="aeccab7e06f84dacaf38b38d3219716b91cb27230d4bae65bf34d8ffa145a672" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.496254 4684 scope.go:117] "RemoveContainer" containerID="a4531bc3114ce426c3115e32ae839f76e0c3911482068252df9d2ea945ddf4fb" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.502588 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68845c6b95-pcn55"] Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.513509 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-68845c6b95-pcn55"] Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.517081 4684 scope.go:117] "RemoveContainer" containerID="aeccab7e06f84dacaf38b38d3219716b91cb27230d4bae65bf34d8ffa145a672" Oct 13 13:26:07 crc kubenswrapper[4684]: E1013 13:26:07.517785 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aeccab7e06f84dacaf38b38d3219716b91cb27230d4bae65bf34d8ffa145a672\": container with ID starting with aeccab7e06f84dacaf38b38d3219716b91cb27230d4bae65bf34d8ffa145a672 not found: ID does not exist" containerID="aeccab7e06f84dacaf38b38d3219716b91cb27230d4bae65bf34d8ffa145a672" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.517831 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeccab7e06f84dacaf38b38d3219716b91cb27230d4bae65bf34d8ffa145a672"} err="failed to get container status \"aeccab7e06f84dacaf38b38d3219716b91cb27230d4bae65bf34d8ffa145a672\": rpc error: code = NotFound desc = could not find container \"aeccab7e06f84dacaf38b38d3219716b91cb27230d4bae65bf34d8ffa145a672\": container with ID starting with aeccab7e06f84dacaf38b38d3219716b91cb27230d4bae65bf34d8ffa145a672 not found: ID does not exist" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.517859 4684 scope.go:117] "RemoveContainer" containerID="a4531bc3114ce426c3115e32ae839f76e0c3911482068252df9d2ea945ddf4fb" Oct 13 13:26:07 crc kubenswrapper[4684]: E1013 13:26:07.518134 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4531bc3114ce426c3115e32ae839f76e0c3911482068252df9d2ea945ddf4fb\": container with ID starting with a4531bc3114ce426c3115e32ae839f76e0c3911482068252df9d2ea945ddf4fb not found: ID does not exist" containerID="a4531bc3114ce426c3115e32ae839f76e0c3911482068252df9d2ea945ddf4fb" Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.518157 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4531bc3114ce426c3115e32ae839f76e0c3911482068252df9d2ea945ddf4fb"} err="failed to get container status 
\"a4531bc3114ce426c3115e32ae839f76e0c3911482068252df9d2ea945ddf4fb\": rpc error: code = NotFound desc = could not find container \"a4531bc3114ce426c3115e32ae839f76e0c3911482068252df9d2ea945ddf4fb\": container with ID starting with a4531bc3114ce426c3115e32ae839f76e0c3911482068252df9d2ea945ddf4fb not found: ID does not exist" Oct 13 13:26:07 crc kubenswrapper[4684]: W1013 13:26:07.572558 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d7ae4ab_21ea_43b7_b33b_0abe75b043dc.slice/crio-a70398de85205cb198c00a6c560492d214c5993941ff914e62684c4e666ebb2e WatchSource:0}: Error finding container a70398de85205cb198c00a6c560492d214c5993941ff914e62684c4e666ebb2e: Status 404 returned error can't find the container with id a70398de85205cb198c00a6c560492d214c5993941ff914e62684c4e666ebb2e Oct 13 13:26:07 crc kubenswrapper[4684]: I1013 13:26:07.589108 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-dz77b"] Oct 13 13:26:08 crc kubenswrapper[4684]: I1013 13:26:08.382500 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6a99f03-7caa-4fa4-856a-26d2ec9a5d19" path="/var/lib/kubelet/pods/e6a99f03-7caa-4fa4-856a-26d2ec9a5d19/volumes" Oct 13 13:26:08 crc kubenswrapper[4684]: I1013 13:26:08.555294 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dz77b" event={"ID":"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc","Type":"ContainerStarted","Data":"7c8eb225eb18436868922853baa2ded66725adcc42a358daed53643bae5db44b"} Oct 13 13:26:08 crc kubenswrapper[4684]: I1013 13:26:08.555345 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dz77b" event={"ID":"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc","Type":"ContainerStarted","Data":"a70398de85205cb198c00a6c560492d214c5993941ff914e62684c4e666ebb2e"} Oct 13 13:26:08 crc kubenswrapper[4684]: I1013 13:26:08.573970 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-dz77b" podStartSLOduration=2.573951216 podStartE2EDuration="2.573951216s" podCreationTimestamp="2025-10-13 13:26:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:26:08.573213662 +0000 UTC m=+1123.140597732" watchObservedRunningTime="2025-10-13 13:26:08.573951216 +0000 UTC m=+1123.141335286" Oct 13 13:26:09 crc kubenswrapper[4684]: I1013 13:26:09.591000 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6","Type":"ContainerStarted","Data":"918a7eb4ff4d0e8a2066aecd1483e982637b55d16c0757d82c886d2f2302babb"} Oct 13 13:26:09 crc kubenswrapper[4684]: I1013 13:26:09.591301 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 13 13:26:09 crc kubenswrapper[4684]: I1013 13:26:09.624793 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.530183594 podStartE2EDuration="6.624771046s" podCreationTimestamp="2025-10-13 13:26:03 +0000 UTC" firstStartedPulling="2025-10-13 13:26:04.327122085 +0000 UTC m=+1118.894506155" lastFinishedPulling="2025-10-13 13:26:08.421709547 +0000 UTC m=+1122.989093607" observedRunningTime="2025-10-13 13:26:09.615055862 +0000 UTC m=+1124.182439932" watchObservedRunningTime="2025-10-13 13:26:09.624771046 +0000 UTC m=+1124.192155126" 
Oct 13 13:26:12 crc kubenswrapper[4684]: I1013 13:26:12.622720 4684 generic.go:334] "Generic (PLEG): container finished" podID="2d7ae4ab-21ea-43b7-b33b-0abe75b043dc" containerID="7c8eb225eb18436868922853baa2ded66725adcc42a358daed53643bae5db44b" exitCode=0 Oct 13 13:26:12 crc kubenswrapper[4684]: I1013 13:26:12.623215 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dz77b" event={"ID":"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc","Type":"ContainerDied","Data":"7c8eb225eb18436868922853baa2ded66725adcc42a358daed53643bae5db44b"} Oct 13 13:26:13 crc kubenswrapper[4684]: I1013 13:26:13.838682 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 13 13:26:13 crc kubenswrapper[4684]: I1013 13:26:13.839141 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 13 13:26:13 crc kubenswrapper[4684]: I1013 13:26:13.989569 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dz77b" Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.095624 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w888m\" (UniqueName: \"kubernetes.io/projected/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-kube-api-access-w888m\") pod \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\" (UID: \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\") " Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.095790 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-scripts\") pod \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\" (UID: \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\") " Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.095835 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-combined-ca-bundle\") pod \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\" (UID: \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\") " Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.095868 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-config-data\") pod \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\" (UID: \"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc\") " Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.104005 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-scripts" (OuterVolumeSpecName: "scripts") pod "2d7ae4ab-21ea-43b7-b33b-0abe75b043dc" (UID: "2d7ae4ab-21ea-43b7-b33b-0abe75b043dc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.105764 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-kube-api-access-w888m" (OuterVolumeSpecName: "kube-api-access-w888m") pod "2d7ae4ab-21ea-43b7-b33b-0abe75b043dc" (UID: "2d7ae4ab-21ea-43b7-b33b-0abe75b043dc"). InnerVolumeSpecName "kube-api-access-w888m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.136022 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-config-data" (OuterVolumeSpecName: "config-data") pod "2d7ae4ab-21ea-43b7-b33b-0abe75b043dc" (UID: "2d7ae4ab-21ea-43b7-b33b-0abe75b043dc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.172004 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d7ae4ab-21ea-43b7-b33b-0abe75b043dc" (UID: "2d7ae4ab-21ea-43b7-b33b-0abe75b043dc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.198622 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w888m\" (UniqueName: \"kubernetes.io/projected/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-kube-api-access-w888m\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.198663 4684 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.198674 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.198682 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.641512 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dz77b" event={"ID":"2d7ae4ab-21ea-43b7-b33b-0abe75b043dc","Type":"ContainerDied","Data":"a70398de85205cb198c00a6c560492d214c5993941ff914e62684c4e666ebb2e"} Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.641832 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a70398de85205cb198c00a6c560492d214c5993941ff914e62684c4e666ebb2e" Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.641573 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dz77b" Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.777788 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.778067 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="8265b2be-813d-48d3-b1d5-9a7c93215b54" containerName="nova-scheduler-scheduler" containerID="cri-o://79c3ee9963c43e3fbe983c70d1510bbb0a214d280fd3d16e1683fb4e5e495d78" gracePeriod=30 Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.810876 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.811121 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="71041122-46fe-4fe2-8943-9100f8c228b0" containerName="nova-api-log" containerID="cri-o://df257588ec68e7ea8606ce916531e3767bde0cc0acfd0c3311e53177dd1a751a" gracePeriod=30 Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.811504 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="71041122-46fe-4fe2-8943-9100f8c228b0" containerName="nova-api-api" containerID="cri-o://1740304713d5cab0bdcba6581277e0fe1e35d39df169813ff8b72f7514667fae" gracePeriod=30 Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.825156 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="71041122-46fe-4fe2-8943-9100f8c228b0" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.196:8774/\": EOF" Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.825357 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="71041122-46fe-4fe2-8943-9100f8c228b0" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.196:8774/\": EOF" Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.859162 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.859397 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="15a73c00-9963-44b4-8e6f-a8e68e929a9e" containerName="nova-metadata-log" containerID="cri-o://40d4b188aee7c2bdde457018353fe13126ea9bc79d9023be12cc820fa7c771c6" gracePeriod=30 Oct 13 13:26:14 crc kubenswrapper[4684]: I1013 13:26:14.859479 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="15a73c00-9963-44b4-8e6f-a8e68e929a9e" containerName="nova-metadata-metadata" containerID="cri-o://c7748928cb55c1c4405cf56297071d626eb538f48b4d3c346ad462390a66d8d4" gracePeriod=30 Oct 13 13:26:15 crc kubenswrapper[4684]: I1013 13:26:15.651234 4684 generic.go:334] "Generic (PLEG): container finished" podID="71041122-46fe-4fe2-8943-9100f8c228b0" containerID="df257588ec68e7ea8606ce916531e3767bde0cc0acfd0c3311e53177dd1a751a" exitCode=143 Oct 13 13:26:15 crc kubenswrapper[4684]: I1013 13:26:15.651332 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"71041122-46fe-4fe2-8943-9100f8c228b0","Type":"ContainerDied","Data":"df257588ec68e7ea8606ce916531e3767bde0cc0acfd0c3311e53177dd1a751a"} Oct 13 13:26:15 crc kubenswrapper[4684]: I1013 13:26:15.654349 4684 generic.go:334] "Generic (PLEG): container finished" 
podID="15a73c00-9963-44b4-8e6f-a8e68e929a9e" containerID="40d4b188aee7c2bdde457018353fe13126ea9bc79d9023be12cc820fa7c771c6" exitCode=143 Oct 13 13:26:15 crc kubenswrapper[4684]: I1013 13:26:15.654378 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"15a73c00-9963-44b4-8e6f-a8e68e929a9e","Type":"ContainerDied","Data":"40d4b188aee7c2bdde457018353fe13126ea9bc79d9023be12cc820fa7c771c6"} Oct 13 13:26:16 crc kubenswrapper[4684]: E1013 13:26:16.476790 4684 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="79c3ee9963c43e3fbe983c70d1510bbb0a214d280fd3d16e1683fb4e5e495d78" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 13 13:26:16 crc kubenswrapper[4684]: E1013 13:26:16.478440 4684 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="79c3ee9963c43e3fbe983c70d1510bbb0a214d280fd3d16e1683fb4e5e495d78" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 13 13:26:16 crc kubenswrapper[4684]: E1013 13:26:16.480842 4684 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="79c3ee9963c43e3fbe983c70d1510bbb0a214d280fd3d16e1683fb4e5e495d78" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 13 13:26:16 crc kubenswrapper[4684]: E1013 13:26:16.480949 4684 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="8265b2be-813d-48d3-b1d5-9a7c93215b54" containerName="nova-scheduler-scheduler" Oct 13 13:26:17 crc kubenswrapper[4684]: I1013 13:26:17.995975 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="15a73c00-9963-44b4-8e6f-a8e68e929a9e" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": read tcp 10.217.0.2:38194->10.217.0.189:8775: read: connection reset by peer" Oct 13 13:26:17 crc kubenswrapper[4684]: I1013 13:26:17.995991 4684 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="15a73c00-9963-44b4-8e6f-a8e68e929a9e" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": read tcp 10.217.0.2:38180->10.217.0.189:8775: read: connection reset by peer" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.459538 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.579609 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15a73c00-9963-44b4-8e6f-a8e68e929a9e-logs\") pod \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.579684 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-config-data\") pod \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.579766 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-combined-ca-bundle\") pod \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.579787 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nfqq\" (UniqueName: \"kubernetes.io/projected/15a73c00-9963-44b4-8e6f-a8e68e929a9e-kube-api-access-2nfqq\") pod \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.579818 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-nova-metadata-tls-certs\") pod \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\" (UID: \"15a73c00-9963-44b4-8e6f-a8e68e929a9e\") " Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.580279 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15a73c00-9963-44b4-8e6f-a8e68e929a9e-logs" (OuterVolumeSpecName: "logs") pod "15a73c00-9963-44b4-8e6f-a8e68e929a9e" (UID: "15a73c00-9963-44b4-8e6f-a8e68e929a9e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.585352 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15a73c00-9963-44b4-8e6f-a8e68e929a9e-kube-api-access-2nfqq" (OuterVolumeSpecName: "kube-api-access-2nfqq") pod "15a73c00-9963-44b4-8e6f-a8e68e929a9e" (UID: "15a73c00-9963-44b4-8e6f-a8e68e929a9e"). InnerVolumeSpecName "kube-api-access-2nfqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.605492 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15a73c00-9963-44b4-8e6f-a8e68e929a9e" (UID: "15a73c00-9963-44b4-8e6f-a8e68e929a9e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.609724 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-config-data" (OuterVolumeSpecName: "config-data") pod "15a73c00-9963-44b4-8e6f-a8e68e929a9e" (UID: "15a73c00-9963-44b4-8e6f-a8e68e929a9e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.627561 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "15a73c00-9963-44b4-8e6f-a8e68e929a9e" (UID: "15a73c00-9963-44b4-8e6f-a8e68e929a9e"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.682756 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.683032 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nfqq\" (UniqueName: \"kubernetes.io/projected/15a73c00-9963-44b4-8e6f-a8e68e929a9e-kube-api-access-2nfqq\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.683058 4684 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.683070 4684 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15a73c00-9963-44b4-8e6f-a8e68e929a9e-logs\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.683102 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15a73c00-9963-44b4-8e6f-a8e68e929a9e-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.683232 4684 generic.go:334] "Generic (PLEG): container finished" podID="15a73c00-9963-44b4-8e6f-a8e68e929a9e" containerID="c7748928cb55c1c4405cf56297071d626eb538f48b4d3c346ad462390a66d8d4" exitCode=0 Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.683266 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"15a73c00-9963-44b4-8e6f-a8e68e929a9e","Type":"ContainerDied","Data":"c7748928cb55c1c4405cf56297071d626eb538f48b4d3c346ad462390a66d8d4"} Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.683310 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.683333 4684 scope.go:117] "RemoveContainer" containerID="c7748928cb55c1c4405cf56297071d626eb538f48b4d3c346ad462390a66d8d4" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.683320 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"15a73c00-9963-44b4-8e6f-a8e68e929a9e","Type":"ContainerDied","Data":"dc6b1cae58139ee0f58a872ed104913635486b9c4ae7b400cdc5a002e686c47b"} Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.720113 4684 scope.go:117] "RemoveContainer" containerID="40d4b188aee7c2bdde457018353fe13126ea9bc79d9023be12cc820fa7c771c6" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.728211 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.739958 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.750866 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 13 13:26:18 crc kubenswrapper[4684]: E1013 13:26:18.751369 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6a99f03-7caa-4fa4-856a-26d2ec9a5d19" containerName="init" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.751391 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6a99f03-7caa-4fa4-856a-26d2ec9a5d19" containerName="init" Oct 13 13:26:18 crc kubenswrapper[4684]: E1013 13:26:18.751415 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15a73c00-9963-44b4-8e6f-a8e68e929a9e" containerName="nova-metadata-log" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.751424 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="15a73c00-9963-44b4-8e6f-a8e68e929a9e" containerName="nova-metadata-log" Oct 13 13:26:18 crc kubenswrapper[4684]: E1013 13:26:18.751436 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15a73c00-9963-44b4-8e6f-a8e68e929a9e" containerName="nova-metadata-metadata" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.751444 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="15a73c00-9963-44b4-8e6f-a8e68e929a9e" containerName="nova-metadata-metadata" Oct 13 13:26:18 crc kubenswrapper[4684]: E1013 13:26:18.751455 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6a99f03-7caa-4fa4-856a-26d2ec9a5d19" containerName="dnsmasq-dns" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.751462 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6a99f03-7caa-4fa4-856a-26d2ec9a5d19" containerName="dnsmasq-dns" Oct 13 13:26:18 crc kubenswrapper[4684]: E1013 13:26:18.751485 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d7ae4ab-21ea-43b7-b33b-0abe75b043dc" containerName="nova-manage" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.751492 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d7ae4ab-21ea-43b7-b33b-0abe75b043dc" containerName="nova-manage" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.751741 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6a99f03-7caa-4fa4-856a-26d2ec9a5d19" containerName="dnsmasq-dns" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.751768 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="15a73c00-9963-44b4-8e6f-a8e68e929a9e" containerName="nova-metadata-log" Oct 13 13:26:18 crc 
kubenswrapper[4684]: I1013 13:26:18.751786 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="15a73c00-9963-44b4-8e6f-a8e68e929a9e" containerName="nova-metadata-metadata" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.751797 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d7ae4ab-21ea-43b7-b33b-0abe75b043dc" containerName="nova-manage" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.752821 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.754572 4684 scope.go:117] "RemoveContainer" containerID="c7748928cb55c1c4405cf56297071d626eb538f48b4d3c346ad462390a66d8d4" Oct 13 13:26:18 crc kubenswrapper[4684]: E1013 13:26:18.759045 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7748928cb55c1c4405cf56297071d626eb538f48b4d3c346ad462390a66d8d4\": container with ID starting with c7748928cb55c1c4405cf56297071d626eb538f48b4d3c346ad462390a66d8d4 not found: ID does not exist" containerID="c7748928cb55c1c4405cf56297071d626eb538f48b4d3c346ad462390a66d8d4" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.759088 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7748928cb55c1c4405cf56297071d626eb538f48b4d3c346ad462390a66d8d4"} err="failed to get container status \"c7748928cb55c1c4405cf56297071d626eb538f48b4d3c346ad462390a66d8d4\": rpc error: code = NotFound desc = could not find container \"c7748928cb55c1c4405cf56297071d626eb538f48b4d3c346ad462390a66d8d4\": container with ID starting with c7748928cb55c1c4405cf56297071d626eb538f48b4d3c346ad462390a66d8d4 not found: ID does not exist" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.759115 4684 scope.go:117] "RemoveContainer" containerID="40d4b188aee7c2bdde457018353fe13126ea9bc79d9023be12cc820fa7c771c6" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.760247 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.761757 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 13:26:18 crc kubenswrapper[4684]: E1013 13:26:18.765055 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40d4b188aee7c2bdde457018353fe13126ea9bc79d9023be12cc820fa7c771c6\": container with ID starting with 40d4b188aee7c2bdde457018353fe13126ea9bc79d9023be12cc820fa7c771c6 not found: ID does not exist" containerID="40d4b188aee7c2bdde457018353fe13126ea9bc79d9023be12cc820fa7c771c6" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.765612 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40d4b188aee7c2bdde457018353fe13126ea9bc79d9023be12cc820fa7c771c6"} err="failed to get container status \"40d4b188aee7c2bdde457018353fe13126ea9bc79d9023be12cc820fa7c771c6\": rpc error: code = NotFound desc = could not find container \"40d4b188aee7c2bdde457018353fe13126ea9bc79d9023be12cc820fa7c771c6\": container with ID starting with 40d4b188aee7c2bdde457018353fe13126ea9bc79d9023be12cc820fa7c771c6 not found: ID does not exist" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.765307 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 13 13:26:18 crc 
kubenswrapper[4684]: I1013 13:26:18.887012 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ce23c3b-fb2a-4762-9a9e-f3581e9150ca-config-data\") pod \"nova-metadata-0\" (UID: \"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca\") " pod="openstack/nova-metadata-0" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.887392 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ce23c3b-fb2a-4762-9a9e-f3581e9150ca-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca\") " pod="openstack/nova-metadata-0" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.887536 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdq84\" (UniqueName: \"kubernetes.io/projected/6ce23c3b-fb2a-4762-9a9e-f3581e9150ca-kube-api-access-rdq84\") pod \"nova-metadata-0\" (UID: \"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca\") " pod="openstack/nova-metadata-0" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.887697 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ce23c3b-fb2a-4762-9a9e-f3581e9150ca-logs\") pod \"nova-metadata-0\" (UID: \"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca\") " pod="openstack/nova-metadata-0" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.887802 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce23c3b-fb2a-4762-9a9e-f3581e9150ca-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca\") " pod="openstack/nova-metadata-0" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.990370 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdq84\" (UniqueName: \"kubernetes.io/projected/6ce23c3b-fb2a-4762-9a9e-f3581e9150ca-kube-api-access-rdq84\") pod \"nova-metadata-0\" (UID: \"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca\") " pod="openstack/nova-metadata-0" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.990889 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ce23c3b-fb2a-4762-9a9e-f3581e9150ca-logs\") pod \"nova-metadata-0\" (UID: \"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca\") " pod="openstack/nova-metadata-0" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.991054 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce23c3b-fb2a-4762-9a9e-f3581e9150ca-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca\") " pod="openstack/nova-metadata-0" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.991256 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ce23c3b-fb2a-4762-9a9e-f3581e9150ca-config-data\") pod \"nova-metadata-0\" (UID: \"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca\") " pod="openstack/nova-metadata-0" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.991379 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/6ce23c3b-fb2a-4762-9a9e-f3581e9150ca-logs\") pod \"nova-metadata-0\" (UID: \"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca\") " pod="openstack/nova-metadata-0" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.991527 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ce23c3b-fb2a-4762-9a9e-f3581e9150ca-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca\") " pod="openstack/nova-metadata-0" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.996658 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce23c3b-fb2a-4762-9a9e-f3581e9150ca-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca\") " pod="openstack/nova-metadata-0" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.996801 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ce23c3b-fb2a-4762-9a9e-f3581e9150ca-config-data\") pod \"nova-metadata-0\" (UID: \"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca\") " pod="openstack/nova-metadata-0" Oct 13 13:26:18 crc kubenswrapper[4684]: I1013 13:26:18.999122 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ce23c3b-fb2a-4762-9a9e-f3581e9150ca-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca\") " pod="openstack/nova-metadata-0" Oct 13 13:26:19 crc kubenswrapper[4684]: I1013 13:26:19.013257 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdq84\" (UniqueName: \"kubernetes.io/projected/6ce23c3b-fb2a-4762-9a9e-f3581e9150ca-kube-api-access-rdq84\") pod \"nova-metadata-0\" (UID: \"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca\") " pod="openstack/nova-metadata-0" Oct 13 13:26:19 crc kubenswrapper[4684]: I1013 13:26:19.082464 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 13:26:19 crc kubenswrapper[4684]: I1013 13:26:19.596633 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 13:26:19 crc kubenswrapper[4684]: W1013 13:26:19.597547 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ce23c3b_fb2a_4762_9a9e_f3581e9150ca.slice/crio-bf344f1fbbb7b571f4508c415ea463e07a693a753b18026fc3eb91568807ccc7 WatchSource:0}: Error finding container bf344f1fbbb7b571f4508c415ea463e07a693a753b18026fc3eb91568807ccc7: Status 404 returned error can't find the container with id bf344f1fbbb7b571f4508c415ea463e07a693a753b18026fc3eb91568807ccc7 Oct 13 13:26:19 crc kubenswrapper[4684]: I1013 13:26:19.693741 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca","Type":"ContainerStarted","Data":"bf344f1fbbb7b571f4508c415ea463e07a693a753b18026fc3eb91568807ccc7"} Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.365540 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15a73c00-9963-44b4-8e6f-a8e68e929a9e" path="/var/lib/kubelet/pods/15a73c00-9963-44b4-8e6f-a8e68e929a9e/volumes" Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.403025 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.526068 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8265b2be-813d-48d3-b1d5-9a7c93215b54-config-data\") pod \"8265b2be-813d-48d3-b1d5-9a7c93215b54\" (UID: \"8265b2be-813d-48d3-b1d5-9a7c93215b54\") " Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.526388 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8265b2be-813d-48d3-b1d5-9a7c93215b54-combined-ca-bundle\") pod \"8265b2be-813d-48d3-b1d5-9a7c93215b54\" (UID: \"8265b2be-813d-48d3-b1d5-9a7c93215b54\") " Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.526621 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf5g9\" (UniqueName: \"kubernetes.io/projected/8265b2be-813d-48d3-b1d5-9a7c93215b54-kube-api-access-gf5g9\") pod \"8265b2be-813d-48d3-b1d5-9a7c93215b54\" (UID: \"8265b2be-813d-48d3-b1d5-9a7c93215b54\") " Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.538155 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8265b2be-813d-48d3-b1d5-9a7c93215b54-kube-api-access-gf5g9" (OuterVolumeSpecName: "kube-api-access-gf5g9") pod "8265b2be-813d-48d3-b1d5-9a7c93215b54" (UID: "8265b2be-813d-48d3-b1d5-9a7c93215b54"). InnerVolumeSpecName "kube-api-access-gf5g9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.578170 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8265b2be-813d-48d3-b1d5-9a7c93215b54-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8265b2be-813d-48d3-b1d5-9a7c93215b54" (UID: "8265b2be-813d-48d3-b1d5-9a7c93215b54"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.583452 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8265b2be-813d-48d3-b1d5-9a7c93215b54-config-data" (OuterVolumeSpecName: "config-data") pod "8265b2be-813d-48d3-b1d5-9a7c93215b54" (UID: "8265b2be-813d-48d3-b1d5-9a7c93215b54"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.630145 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf5g9\" (UniqueName: \"kubernetes.io/projected/8265b2be-813d-48d3-b1d5-9a7c93215b54-kube-api-access-gf5g9\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.630244 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8265b2be-813d-48d3-b1d5-9a7c93215b54-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.630266 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8265b2be-813d-48d3-b1d5-9a7c93215b54-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.678921 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.708176 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca","Type":"ContainerStarted","Data":"8f5d37ada40754c1f2e12ea4f337f78f2ce6adcbfec803179337eab1d4e58172"} Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.708252 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6ce23c3b-fb2a-4762-9a9e-f3581e9150ca","Type":"ContainerStarted","Data":"3d43b057856623fe8db481efa49eb31ff1ccad30054e3a824f23c3c5af105559"} Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.712440 4684 generic.go:334] "Generic (PLEG): container finished" podID="71041122-46fe-4fe2-8943-9100f8c228b0" containerID="1740304713d5cab0bdcba6581277e0fe1e35d39df169813ff8b72f7514667fae" exitCode=0 Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.712652 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"71041122-46fe-4fe2-8943-9100f8c228b0","Type":"ContainerDied","Data":"1740304713d5cab0bdcba6581277e0fe1e35d39df169813ff8b72f7514667fae"} Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.712700 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"71041122-46fe-4fe2-8943-9100f8c228b0","Type":"ContainerDied","Data":"e4619b3160ed359e3b63b00f7b7b24007faaf4fcf463fd20f6d285efd0c99a7d"} Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.712736 4684 scope.go:117] "RemoveContainer" containerID="1740304713d5cab0bdcba6581277e0fe1e35d39df169813ff8b72f7514667fae" Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.712737 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.714460 4684 generic.go:334] "Generic (PLEG): container finished" podID="8265b2be-813d-48d3-b1d5-9a7c93215b54" containerID="79c3ee9963c43e3fbe983c70d1510bbb0a214d280fd3d16e1683fb4e5e495d78" exitCode=0 Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.714584 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8265b2be-813d-48d3-b1d5-9a7c93215b54","Type":"ContainerDied","Data":"79c3ee9963c43e3fbe983c70d1510bbb0a214d280fd3d16e1683fb4e5e495d78"} Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.714691 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8265b2be-813d-48d3-b1d5-9a7c93215b54","Type":"ContainerDied","Data":"0643b1a9bf6a54045213f959c3ab3458b9fe41e6f393401760ccb3c253dea516"} Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.714716 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.764191 4684 scope.go:117] "RemoveContainer" containerID="df257588ec68e7ea8606ce916531e3767bde0cc0acfd0c3311e53177dd1a751a"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.768991 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.768976158 podStartE2EDuration="2.768976158s" podCreationTimestamp="2025-10-13 13:26:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:26:20.752039307 +0000 UTC m=+1135.319423387" watchObservedRunningTime="2025-10-13 13:26:20.768976158 +0000 UTC m=+1135.336360228"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.790691 4684 scope.go:117] "RemoveContainer" containerID="1740304713d5cab0bdcba6581277e0fe1e35d39df169813ff8b72f7514667fae"
Oct 13 13:26:20 crc kubenswrapper[4684]: E1013 13:26:20.791170 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1740304713d5cab0bdcba6581277e0fe1e35d39df169813ff8b72f7514667fae\": container with ID starting with 1740304713d5cab0bdcba6581277e0fe1e35d39df169813ff8b72f7514667fae not found: ID does not exist" containerID="1740304713d5cab0bdcba6581277e0fe1e35d39df169813ff8b72f7514667fae"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.791201 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1740304713d5cab0bdcba6581277e0fe1e35d39df169813ff8b72f7514667fae"} err="failed to get container status \"1740304713d5cab0bdcba6581277e0fe1e35d39df169813ff8b72f7514667fae\": rpc error: code = NotFound desc = could not find container \"1740304713d5cab0bdcba6581277e0fe1e35d39df169813ff8b72f7514667fae\": container with ID starting with 1740304713d5cab0bdcba6581277e0fe1e35d39df169813ff8b72f7514667fae not found: ID does not exist"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.791221 4684 scope.go:117] "RemoveContainer" containerID="df257588ec68e7ea8606ce916531e3767bde0cc0acfd0c3311e53177dd1a751a"
Oct 13 13:26:20 crc kubenswrapper[4684]: E1013 13:26:20.792742 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df257588ec68e7ea8606ce916531e3767bde0cc0acfd0c3311e53177dd1a751a\": container with ID starting with df257588ec68e7ea8606ce916531e3767bde0cc0acfd0c3311e53177dd1a751a not found: ID does not exist" containerID="df257588ec68e7ea8606ce916531e3767bde0cc0acfd0c3311e53177dd1a751a"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.792763 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df257588ec68e7ea8606ce916531e3767bde0cc0acfd0c3311e53177dd1a751a"} err="failed to get container status \"df257588ec68e7ea8606ce916531e3767bde0cc0acfd0c3311e53177dd1a751a\": rpc error: code = NotFound desc = could not find container \"df257588ec68e7ea8606ce916531e3767bde0cc0acfd0c3311e53177dd1a751a\": container with ID starting with df257588ec68e7ea8606ce916531e3767bde0cc0acfd0c3311e53177dd1a751a not found: ID does not exist"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.792778 4684 scope.go:117] "RemoveContainer" containerID="79c3ee9963c43e3fbe983c70d1510bbb0a214d280fd3d16e1683fb4e5e495d78"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.795991 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.805122 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.813167 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Oct 13 13:26:20 crc kubenswrapper[4684]: E1013 13:26:20.813641 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71041122-46fe-4fe2-8943-9100f8c228b0" containerName="nova-api-log"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.813664 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="71041122-46fe-4fe2-8943-9100f8c228b0" containerName="nova-api-log"
Oct 13 13:26:20 crc kubenswrapper[4684]: E1013 13:26:20.813685 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8265b2be-813d-48d3-b1d5-9a7c93215b54" containerName="nova-scheduler-scheduler"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.813694 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="8265b2be-813d-48d3-b1d5-9a7c93215b54" containerName="nova-scheduler-scheduler"
Oct 13 13:26:20 crc kubenswrapper[4684]: E1013 13:26:20.813715 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71041122-46fe-4fe2-8943-9100f8c228b0" containerName="nova-api-api"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.813722 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="71041122-46fe-4fe2-8943-9100f8c228b0" containerName="nova-api-api"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.813985 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="71041122-46fe-4fe2-8943-9100f8c228b0" containerName="nova-api-api"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.814007 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="71041122-46fe-4fe2-8943-9100f8c228b0" containerName="nova-api-log"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.814019 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="8265b2be-813d-48d3-b1d5-9a7c93215b54" containerName="nova-scheduler-scheduler"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.814788 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.817057 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.820812 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.821813 4684 scope.go:117] "RemoveContainer" containerID="79c3ee9963c43e3fbe983c70d1510bbb0a214d280fd3d16e1683fb4e5e495d78"
Oct 13 13:26:20 crc kubenswrapper[4684]: E1013 13:26:20.828306 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79c3ee9963c43e3fbe983c70d1510bbb0a214d280fd3d16e1683fb4e5e495d78\": container with ID starting with 79c3ee9963c43e3fbe983c70d1510bbb0a214d280fd3d16e1683fb4e5e495d78 not found: ID does not exist" containerID="79c3ee9963c43e3fbe983c70d1510bbb0a214d280fd3d16e1683fb4e5e495d78"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.828340 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79c3ee9963c43e3fbe983c70d1510bbb0a214d280fd3d16e1683fb4e5e495d78"} err="failed to get container status \"79c3ee9963c43e3fbe983c70d1510bbb0a214d280fd3d16e1683fb4e5e495d78\": rpc error: code = NotFound desc = could not find container \"79c3ee9963c43e3fbe983c70d1510bbb0a214d280fd3d16e1683fb4e5e495d78\": container with ID starting with 79c3ee9963c43e3fbe983c70d1510bbb0a214d280fd3d16e1683fb4e5e495d78 not found: ID does not exist"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.833376 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-public-tls-certs\") pod \"71041122-46fe-4fe2-8943-9100f8c228b0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") "
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.833443 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79qf6\" (UniqueName: \"kubernetes.io/projected/71041122-46fe-4fe2-8943-9100f8c228b0-kube-api-access-79qf6\") pod \"71041122-46fe-4fe2-8943-9100f8c228b0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") "
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.833511 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-combined-ca-bundle\") pod \"71041122-46fe-4fe2-8943-9100f8c228b0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") "
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.833630 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-config-data\") pod \"71041122-46fe-4fe2-8943-9100f8c228b0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") "
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.833689 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-internal-tls-certs\") pod \"71041122-46fe-4fe2-8943-9100f8c228b0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") "
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.833779 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/71041122-46fe-4fe2-8943-9100f8c228b0-logs\") pod \"71041122-46fe-4fe2-8943-9100f8c228b0\" (UID: \"71041122-46fe-4fe2-8943-9100f8c228b0\") "
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.835859 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71041122-46fe-4fe2-8943-9100f8c228b0-logs" (OuterVolumeSpecName: "logs") pod "71041122-46fe-4fe2-8943-9100f8c228b0" (UID: "71041122-46fe-4fe2-8943-9100f8c228b0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.841409 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71041122-46fe-4fe2-8943-9100f8c228b0-kube-api-access-79qf6" (OuterVolumeSpecName: "kube-api-access-79qf6") pod "71041122-46fe-4fe2-8943-9100f8c228b0" (UID: "71041122-46fe-4fe2-8943-9100f8c228b0"). InnerVolumeSpecName "kube-api-access-79qf6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.858335 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-config-data" (OuterVolumeSpecName: "config-data") pod "71041122-46fe-4fe2-8943-9100f8c228b0" (UID: "71041122-46fe-4fe2-8943-9100f8c228b0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.859311 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "71041122-46fe-4fe2-8943-9100f8c228b0" (UID: "71041122-46fe-4fe2-8943-9100f8c228b0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.882715 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "71041122-46fe-4fe2-8943-9100f8c228b0" (UID: "71041122-46fe-4fe2-8943-9100f8c228b0"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.884262 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "71041122-46fe-4fe2-8943-9100f8c228b0" (UID: "71041122-46fe-4fe2-8943-9100f8c228b0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.937000 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/245c72eb-1ac1-4101-ae07-26a94bdae8e1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"245c72eb-1ac1-4101-ae07-26a94bdae8e1\") " pod="openstack/nova-scheduler-0"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.937072 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/245c72eb-1ac1-4101-ae07-26a94bdae8e1-config-data\") pod \"nova-scheduler-0\" (UID: \"245c72eb-1ac1-4101-ae07-26a94bdae8e1\") " pod="openstack/nova-scheduler-0"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.937729 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wt5l8\" (UniqueName: \"kubernetes.io/projected/245c72eb-1ac1-4101-ae07-26a94bdae8e1-kube-api-access-wt5l8\") pod \"nova-scheduler-0\" (UID: \"245c72eb-1ac1-4101-ae07-26a94bdae8e1\") " pod="openstack/nova-scheduler-0"
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.937978 4684 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.938008 4684 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/71041122-46fe-4fe2-8943-9100f8c228b0-logs\") on node \"crc\" DevicePath \"\""
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.938030 4684 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-public-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.938048 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79qf6\" (UniqueName: \"kubernetes.io/projected/71041122-46fe-4fe2-8943-9100f8c228b0-kube-api-access-79qf6\") on node \"crc\" DevicePath \"\""
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.938067 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 13:26:20 crc kubenswrapper[4684]: I1013 13:26:20.938084 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71041122-46fe-4fe2-8943-9100f8c228b0-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.039437 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/245c72eb-1ac1-4101-ae07-26a94bdae8e1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"245c72eb-1ac1-4101-ae07-26a94bdae8e1\") " pod="openstack/nova-scheduler-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.039473 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/245c72eb-1ac1-4101-ae07-26a94bdae8e1-config-data\") pod \"nova-scheduler-0\" (UID: \"245c72eb-1ac1-4101-ae07-26a94bdae8e1\") " pod="openstack/nova-scheduler-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.039544 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wt5l8\" (UniqueName: \"kubernetes.io/projected/245c72eb-1ac1-4101-ae07-26a94bdae8e1-kube-api-access-wt5l8\") pod \"nova-scheduler-0\" (UID: \"245c72eb-1ac1-4101-ae07-26a94bdae8e1\") " pod="openstack/nova-scheduler-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.047439 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/245c72eb-1ac1-4101-ae07-26a94bdae8e1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"245c72eb-1ac1-4101-ae07-26a94bdae8e1\") " pod="openstack/nova-scheduler-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.056664 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.060744 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/245c72eb-1ac1-4101-ae07-26a94bdae8e1-config-data\") pod \"nova-scheduler-0\" (UID: \"245c72eb-1ac1-4101-ae07-26a94bdae8e1\") " pod="openstack/nova-scheduler-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.064128 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wt5l8\" (UniqueName: \"kubernetes.io/projected/245c72eb-1ac1-4101-ae07-26a94bdae8e1-kube-api-access-wt5l8\") pod \"nova-scheduler-0\" (UID: \"245c72eb-1ac1-4101-ae07-26a94bdae8e1\") " pod="openstack/nova-scheduler-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.073028 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.087419 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.089371 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.092939 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.093386 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.093605 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.109091 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.134766 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.242371 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8332faf7-d7ae-421c-9f02-c17e93f044a5-public-tls-certs\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.242786 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8332faf7-d7ae-421c-9f02-c17e93f044a5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.242831 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8332faf7-d7ae-421c-9f02-c17e93f044a5-config-data\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.242887 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmqd5\" (UniqueName: \"kubernetes.io/projected/8332faf7-d7ae-421c-9f02-c17e93f044a5-kube-api-access-jmqd5\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.242950 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8332faf7-d7ae-421c-9f02-c17e93f044a5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.243157 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8332faf7-d7ae-421c-9f02-c17e93f044a5-logs\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.344822 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8332faf7-d7ae-421c-9f02-c17e93f044a5-logs\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.345035 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8332faf7-d7ae-421c-9f02-c17e93f044a5-public-tls-certs\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.345083 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8332faf7-d7ae-421c-9f02-c17e93f044a5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.345135 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8332faf7-d7ae-421c-9f02-c17e93f044a5-config-data\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.345210 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmqd5\" (UniqueName: \"kubernetes.io/projected/8332faf7-d7ae-421c-9f02-c17e93f044a5-kube-api-access-jmqd5\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.346803 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8332faf7-d7ae-421c-9f02-c17e93f044a5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.345541 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8332faf7-d7ae-421c-9f02-c17e93f044a5-logs\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.360575 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8332faf7-d7ae-421c-9f02-c17e93f044a5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.360641 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8332faf7-d7ae-421c-9f02-c17e93f044a5-public-tls-certs\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.361392 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8332faf7-d7ae-421c-9f02-c17e93f044a5-config-data\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.362458 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8332faf7-d7ae-421c-9f02-c17e93f044a5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.366868 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmqd5\" (UniqueName: \"kubernetes.io/projected/8332faf7-d7ae-421c-9f02-c17e93f044a5-kube-api-access-jmqd5\") pod \"nova-api-0\" (UID: \"8332faf7-d7ae-421c-9f02-c17e93f044a5\") " pod="openstack/nova-api-0"
Oct 13 13:26:21 crc kubenswrapper[4684]: I1013 13:26:21.549849 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 13 13:26:22 crc kubenswrapper[4684]: I1013 13:26:22.286461 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 13 13:26:22 crc kubenswrapper[4684]: W1013 13:26:22.291884 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod245c72eb_1ac1_4101_ae07_26a94bdae8e1.slice/crio-20386f09916b04b09d8d254aff29894550790e5d467c2a0673c17d31886997ea WatchSource:0}: Error finding container 20386f09916b04b09d8d254aff29894550790e5d467c2a0673c17d31886997ea: Status 404 returned error can't find the container with id 20386f09916b04b09d8d254aff29894550790e5d467c2a0673c17d31886997ea
Oct 13 13:26:22 crc kubenswrapper[4684]: I1013 13:26:22.366139 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71041122-46fe-4fe2-8943-9100f8c228b0" path="/var/lib/kubelet/pods/71041122-46fe-4fe2-8943-9100f8c228b0/volumes"
Oct 13 13:26:22 crc kubenswrapper[4684]: I1013 13:26:22.367163 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8265b2be-813d-48d3-b1d5-9a7c93215b54" path="/var/lib/kubelet/pods/8265b2be-813d-48d3-b1d5-9a7c93215b54/volumes"
Oct 13 13:26:22 crc kubenswrapper[4684]: I1013 13:26:22.379825 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 13 13:26:22 crc kubenswrapper[4684]: W1013 13:26:22.388059 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8332faf7_d7ae_421c_9f02_c17e93f044a5.slice/crio-0f627951990778ac5b5afe675c83981a8e776fa327d866c975923de1425e389b WatchSource:0}: Error finding container 0f627951990778ac5b5afe675c83981a8e776fa327d866c975923de1425e389b: Status 404 returned error can't find the container with id 0f627951990778ac5b5afe675c83981a8e776fa327d866c975923de1425e389b
Oct 13 13:26:22 crc kubenswrapper[4684]: I1013 13:26:22.735398 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8332faf7-d7ae-421c-9f02-c17e93f044a5","Type":"ContainerStarted","Data":"e6b71984971092b981d60c4601977c2660fc2717bcd48a22db2c05427c50df3a"}
Oct 13 13:26:22 crc kubenswrapper[4684]: I1013 13:26:22.735469 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8332faf7-d7ae-421c-9f02-c17e93f044a5","Type":"ContainerStarted","Data":"37dd7e0e04f23d6bf8c4739063eb3a93d6fc5b5b1b508ce55d638a00cf95d6c1"}
Oct 13 13:26:22 crc kubenswrapper[4684]: I1013 13:26:22.735487 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8332faf7-d7ae-421c-9f02-c17e93f044a5","Type":"ContainerStarted","Data":"0f627951990778ac5b5afe675c83981a8e776fa327d866c975923de1425e389b"}
Oct 13 13:26:22 crc kubenswrapper[4684]: I1013 13:26:22.737100 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"245c72eb-1ac1-4101-ae07-26a94bdae8e1","Type":"ContainerStarted","Data":"361ad27b94f198d23e6e6ea77f85737aa2017681785bc9aef3bf3ef3b20bdef5"}
Oct 13 13:26:22 crc kubenswrapper[4684]: I1013 13:26:22.737155 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"245c72eb-1ac1-4101-ae07-26a94bdae8e1","Type":"ContainerStarted","Data":"20386f09916b04b09d8d254aff29894550790e5d467c2a0673c17d31886997ea"}
Oct 13 13:26:22 crc kubenswrapper[4684]: I1013 13:26:22.760136 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.760117331 podStartE2EDuration="1.760117331s" podCreationTimestamp="2025-10-13 13:26:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:26:22.755117435 +0000 UTC m=+1137.322501515" watchObservedRunningTime="2025-10-13 13:26:22.760117331 +0000 UTC m=+1137.327501401"
Oct 13 13:26:22 crc kubenswrapper[4684]: I1013 13:26:22.789411 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.7893906299999998 podStartE2EDuration="2.78939063s" podCreationTimestamp="2025-10-13 13:26:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:26:22.780044297 +0000 UTC m=+1137.347428407" watchObservedRunningTime="2025-10-13 13:26:22.78939063 +0000 UTC m=+1137.356774720"
Oct 13 13:26:24 crc kubenswrapper[4684]: I1013 13:26:24.083552 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 13 13:26:24 crc kubenswrapper[4684]: I1013 13:26:24.084972 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 13 13:26:26 crc kubenswrapper[4684]: I1013 13:26:26.135193 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Oct 13 13:26:29 crc kubenswrapper[4684]: I1013 13:26:29.083498 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 13 13:26:29 crc kubenswrapper[4684]: I1013 13:26:29.083815 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 13 13:26:30 crc kubenswrapper[4684]: I1013 13:26:30.097057 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6ce23c3b-fb2a-4762-9a9e-f3581e9150ca" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 13 13:26:30 crc kubenswrapper[4684]: I1013 13:26:30.097082 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6ce23c3b-fb2a-4762-9a9e-f3581e9150ca" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 13 13:26:30 crc kubenswrapper[4684]: I1013 13:26:30.560157 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 13:26:30 crc kubenswrapper[4684]: I1013 13:26:30.560453 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 13:26:30 crc kubenswrapper[4684]: I1013 13:26:30.560508 4684 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wns5s"
Oct 13 13:26:30 crc kubenswrapper[4684]: I1013 13:26:30.561489 4684 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"763ba38e9ba892e0c9f5a9e9f4dafff85d37a0067ef41eb06df8ee48a015f12a"} pod="openshift-machine-config-operator/machine-config-daemon-wns5s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 13 13:26:30 crc kubenswrapper[4684]: I1013 13:26:30.561571 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" containerID="cri-o://763ba38e9ba892e0c9f5a9e9f4dafff85d37a0067ef41eb06df8ee48a015f12a" gracePeriod=600
Oct 13 13:26:30 crc kubenswrapper[4684]: I1013 13:26:30.816745 4684 generic.go:334] "Generic (PLEG): container finished" podID="e54ad64a-6df7-4082-afde-d56463121b3f" containerID="763ba38e9ba892e0c9f5a9e9f4dafff85d37a0067ef41eb06df8ee48a015f12a" exitCode=0
Oct 13 13:26:30 crc kubenswrapper[4684]: I1013 13:26:30.816817 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerDied","Data":"763ba38e9ba892e0c9f5a9e9f4dafff85d37a0067ef41eb06df8ee48a015f12a"}
Oct 13 13:26:30 crc kubenswrapper[4684]: I1013 13:26:30.816866 4684 scope.go:117] "RemoveContainer" containerID="5279c7d29a155e7a0bc08fab5ed9b611b1802504a86b780e2e40dd66f636409f"
Oct 13 13:26:31 crc kubenswrapper[4684]: I1013 13:26:31.135977 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Oct 13 13:26:31 crc kubenswrapper[4684]: I1013 13:26:31.169488 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Oct 13 13:26:31 crc kubenswrapper[4684]: I1013 13:26:31.550407 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 13 13:26:31 crc kubenswrapper[4684]: I1013 13:26:31.550973 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 13 13:26:31 crc kubenswrapper[4684]: I1013 13:26:31.827149 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"d2e805361345d8e27af996006522c59e8ccaab786312d030265294583995b587"}
Oct 13 13:26:31 crc kubenswrapper[4684]: I1013 13:26:31.860714 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Oct 13 13:26:32 crc kubenswrapper[4684]: I1013 13:26:32.561040 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8332faf7-d7ae-421c-9f02-c17e93f044a5" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.200:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 13 13:26:32 crc kubenswrapper[4684]: I1013 13:26:32.561040 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8332faf7-d7ae-421c-9f02-c17e93f044a5" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.200:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 13 13:26:33 crc kubenswrapper[4684]: I1013 13:26:33.825283 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Oct 13 13:26:39 crc kubenswrapper[4684]: I1013 13:26:39.088628 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Oct 13 13:26:39 crc kubenswrapper[4684]: I1013 13:26:39.089272 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Oct 13 13:26:39 crc kubenswrapper[4684]: I1013 13:26:39.094864 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Oct 13 13:26:39 crc kubenswrapper[4684]: I1013 13:26:39.095050 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Oct 13 13:26:41 crc kubenswrapper[4684]: I1013 13:26:41.557210 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Oct 13 13:26:41 crc kubenswrapper[4684]: I1013 13:26:41.558388 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Oct 13 13:26:41 crc kubenswrapper[4684]: I1013 13:26:41.562576 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Oct 13 13:26:41 crc kubenswrapper[4684]: I1013 13:26:41.574802 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Oct 13 13:26:41 crc kubenswrapper[4684]: I1013 13:26:41.915748 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Oct 13 13:26:41 crc kubenswrapper[4684]: I1013 13:26:41.922286 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Oct 13 13:26:49 crc kubenswrapper[4684]: I1013 13:26:49.888705 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 13 13:26:50 crc kubenswrapper[4684]: I1013 13:26:50.852323 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 13 13:26:54 crc kubenswrapper[4684]: I1013 13:26:54.048291 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="1b29378a-0de1-402e-993a-a83cc3d41b67" containerName="rabbitmq" containerID="cri-o://a04e674ecf0bfea746018f05d881840bff8745c72141322c4d0526284203f55b" gracePeriod=604796
Oct 13 13:26:55 crc kubenswrapper[4684]: I1013 13:26:55.308890 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="c869998b-76f5-409d-9ff4-4abe3f7c9289" containerName="rabbitmq" containerID="cri-o://4bcb76d0b48c055f353c0a74a0c9468ce22a179a84be052dbd887ed5949b8f59" gracePeriod=604796
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.653016 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.769783 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-erlang-cookie\") pod \"1b29378a-0de1-402e-993a-a83cc3d41b67\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") "
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.769882 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-config-data\") pod \"1b29378a-0de1-402e-993a-a83cc3d41b67\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") "
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.769966 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1b29378a-0de1-402e-993a-a83cc3d41b67-erlang-cookie-secret\") pod \"1b29378a-0de1-402e-993a-a83cc3d41b67\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") "
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.770049 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-plugins-conf\") pod \"1b29378a-0de1-402e-993a-a83cc3d41b67\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") "
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.770085 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-server-conf\") pod \"1b29378a-0de1-402e-993a-a83cc3d41b67\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") "
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.770133 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwlk2\" (UniqueName: \"kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-kube-api-access-pwlk2\") pod \"1b29378a-0de1-402e-993a-a83cc3d41b67\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") "
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.770164 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-tls\") pod \"1b29378a-0de1-402e-993a-a83cc3d41b67\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") "
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.770199 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1b29378a-0de1-402e-993a-a83cc3d41b67-pod-info\") pod \"1b29378a-0de1-402e-993a-a83cc3d41b67\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") "
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.770231 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-confd\") pod \"1b29378a-0de1-402e-993a-a83cc3d41b67\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") "
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.770293 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-plugins\") pod \"1b29378a-0de1-402e-993a-a83cc3d41b67\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") "
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.770328 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"1b29378a-0de1-402e-993a-a83cc3d41b67\" (UID: \"1b29378a-0de1-402e-993a-a83cc3d41b67\") "
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.772384 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "1b29378a-0de1-402e-993a-a83cc3d41b67" (UID: "1b29378a-0de1-402e-993a-a83cc3d41b67"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.774716 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "1b29378a-0de1-402e-993a-a83cc3d41b67" (UID: "1b29378a-0de1-402e-993a-a83cc3d41b67"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.775815 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "1b29378a-0de1-402e-993a-a83cc3d41b67" (UID: "1b29378a-0de1-402e-993a-a83cc3d41b67"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.787655 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-kube-api-access-pwlk2" (OuterVolumeSpecName: "kube-api-access-pwlk2") pod "1b29378a-0de1-402e-993a-a83cc3d41b67" (UID: "1b29378a-0de1-402e-993a-a83cc3d41b67"). InnerVolumeSpecName "kube-api-access-pwlk2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.789574 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/1b29378a-0de1-402e-993a-a83cc3d41b67-pod-info" (OuterVolumeSpecName: "pod-info") pod "1b29378a-0de1-402e-993a-a83cc3d41b67" (UID: "1b29378a-0de1-402e-993a-a83cc3d41b67"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.791544 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "persistence") pod "1b29378a-0de1-402e-993a-a83cc3d41b67" (UID: "1b29378a-0de1-402e-993a-a83cc3d41b67"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.792260 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b29378a-0de1-402e-993a-a83cc3d41b67-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "1b29378a-0de1-402e-993a-a83cc3d41b67" (UID: "1b29378a-0de1-402e-993a-a83cc3d41b67"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.798557 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "1b29378a-0de1-402e-993a-a83cc3d41b67" (UID: "1b29378a-0de1-402e-993a-a83cc3d41b67"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.832078 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-config-data" (OuterVolumeSpecName: "config-data") pod "1b29378a-0de1-402e-993a-a83cc3d41b67" (UID: "1b29378a-0de1-402e-993a-a83cc3d41b67"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.873066 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-server-conf" (OuterVolumeSpecName: "server-conf") pod "1b29378a-0de1-402e-993a-a83cc3d41b67" (UID: "1b29378a-0de1-402e-993a-a83cc3d41b67"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.873615 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.873639 4684 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1b29378a-0de1-402e-993a-a83cc3d41b67-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.873653 4684 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-plugins-conf\") on node \"crc\" DevicePath \"\""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.873663 4684 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1b29378a-0de1-402e-993a-a83cc3d41b67-server-conf\") on node \"crc\" DevicePath \"\""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.873674 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwlk2\" (UniqueName: \"kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-kube-api-access-pwlk2\") on node \"crc\" DevicePath \"\""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.873683 4684 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.873695 4684 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1b29378a-0de1-402e-993a-a83cc3d41b67-pod-info\") on node \"crc\" DevicePath \"\""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.873704 4684 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.873734 4684 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" "
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.873744 4684 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.909110 4684 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc"
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.930254 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "1b29378a-0de1-402e-993a-a83cc3d41b67" (UID: "1b29378a-0de1-402e-993a-a83cc3d41b67"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.975814 4684 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1b29378a-0de1-402e-993a-a83cc3d41b67-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Oct 13 13:27:00 crc kubenswrapper[4684]: I1013 13:27:00.975856 4684 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\""
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.096433 4684 generic.go:334] "Generic (PLEG): container finished" podID="1b29378a-0de1-402e-993a-a83cc3d41b67" containerID="a04e674ecf0bfea746018f05d881840bff8745c72141322c4d0526284203f55b" exitCode=0
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.096484 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1b29378a-0de1-402e-993a-a83cc3d41b67","Type":"ContainerDied","Data":"a04e674ecf0bfea746018f05d881840bff8745c72141322c4d0526284203f55b"}
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.096516 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1b29378a-0de1-402e-993a-a83cc3d41b67","Type":"ContainerDied","Data":"c97587bef6c1903630ab03f7b04b416310f10567d0380856dfafac402dbb08de"}
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.096527 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.096537 4684 scope.go:117] "RemoveContainer" containerID="a04e674ecf0bfea746018f05d881840bff8745c72141322c4d0526284203f55b"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.189000 4684 scope.go:117] "RemoveContainer" containerID="3ca8fe97998abbb2c1cd3dfd410dc6cdf2f4f2abce4f4ba7eba8b0f3958fde16"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.196203 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.209690 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.221184 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 13 13:27:01 crc kubenswrapper[4684]: E1013 13:27:01.221748 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b29378a-0de1-402e-993a-a83cc3d41b67" containerName="rabbitmq"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.221776 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b29378a-0de1-402e-993a-a83cc3d41b67" containerName="rabbitmq"
Oct 13 13:27:01 crc kubenswrapper[4684]: E1013 13:27:01.221818 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b29378a-0de1-402e-993a-a83cc3d41b67" containerName="setup-container"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.221830 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b29378a-0de1-402e-993a-a83cc3d41b67" containerName="setup-container"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.222177 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b29378a-0de1-402e-993a-a83cc3d41b67" containerName="rabbitmq"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.223790 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.226796 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.227157 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.227586 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-njm7l"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.227850 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.228311 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.228338 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.230231 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.231448 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.241893 4684 scope.go:117] "RemoveContainer" containerID="a04e674ecf0bfea746018f05d881840bff8745c72141322c4d0526284203f55b"
Oct 13 13:27:01 crc kubenswrapper[4684]: E1013 13:27:01.242585 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a04e674ecf0bfea746018f05d881840bff8745c72141322c4d0526284203f55b\": container with ID starting with a04e674ecf0bfea746018f05d881840bff8745c72141322c4d0526284203f55b not found: ID does not exist" containerID="a04e674ecf0bfea746018f05d881840bff8745c72141322c4d0526284203f55b"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.242623 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a04e674ecf0bfea746018f05d881840bff8745c72141322c4d0526284203f55b"} err="failed to get container status \"a04e674ecf0bfea746018f05d881840bff8745c72141322c4d0526284203f55b\": rpc error: code = NotFound desc = could not find container \"a04e674ecf0bfea746018f05d881840bff8745c72141322c4d0526284203f55b\": container with ID starting with a04e674ecf0bfea746018f05d881840bff8745c72141322c4d0526284203f55b not found: ID does not exist"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.242650 4684 scope.go:117] "RemoveContainer" containerID="3ca8fe97998abbb2c1cd3dfd410dc6cdf2f4f2abce4f4ba7eba8b0f3958fde16"
Oct 13 13:27:01 crc kubenswrapper[4684]: E1013 13:27:01.243548 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ca8fe97998abbb2c1cd3dfd410dc6cdf2f4f2abce4f4ba7eba8b0f3958fde16\": container with ID starting with 3ca8fe97998abbb2c1cd3dfd410dc6cdf2f4f2abce4f4ba7eba8b0f3958fde16 not found: ID does not exist" containerID="3ca8fe97998abbb2c1cd3dfd410dc6cdf2f4f2abce4f4ba7eba8b0f3958fde16"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.243629 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ca8fe97998abbb2c1cd3dfd410dc6cdf2f4f2abce4f4ba7eba8b0f3958fde16"} err="failed to get container status \"3ca8fe97998abbb2c1cd3dfd410dc6cdf2f4f2abce4f4ba7eba8b0f3958fde16\": rpc error: code = NotFound desc = could not find container \"3ca8fe97998abbb2c1cd3dfd410dc6cdf2f4f2abce4f4ba7eba8b0f3958fde16\": container with ID starting with 3ca8fe97998abbb2c1cd3dfd410dc6cdf2f4f2abce4f4ba7eba8b0f3958fde16 not found: ID does not exist"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.382617 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.382687 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.382720 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-config-data\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.382738 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.382789 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.382809 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.382970 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.383120 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.383306 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdsn4\" (UniqueName: \"kubernetes.io/projected/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-kube-api-access-bdsn4\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.383371 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.383407 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.485626 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.485683 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.485708 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-config-data\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.485727 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.485774 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.485804 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.485840 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.486043 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.486164 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdsn4\" (UniqueName: \"kubernetes.io/projected/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-kube-api-access-bdsn4\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.486193 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.486222 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.486465 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.486723 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.487036 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.487516 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-config-data\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.487803 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.487991 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.492809 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.492837 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.492928 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.504728 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdsn4\" (UniqueName: \"kubernetes.io/projected/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-kube-api-access-bdsn4\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.508942 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7db9b5b9-e5be-4555-bed6-2fd9d9159b40-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.543761 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"7db9b5b9-e5be-4555-bed6-2fd9d9159b40\") " pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.560042 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.912486 4684 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.994681 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c869998b-76f5-409d-9ff4-4abe3f7c9289-erlang-cookie-secret\") pod \"c869998b-76f5-409d-9ff4-4abe3f7c9289\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.994770 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-plugins\") pod \"c869998b-76f5-409d-9ff4-4abe3f7c9289\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.994828 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-server-conf\") pod \"c869998b-76f5-409d-9ff4-4abe3f7c9289\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.994855 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkq8m\" (UniqueName: \"kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-kube-api-access-rkq8m\") pod \"c869998b-76f5-409d-9ff4-4abe3f7c9289\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.994885 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"c869998b-76f5-409d-9ff4-4abe3f7c9289\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.994949 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c869998b-76f5-409d-9ff4-4abe3f7c9289-pod-info\") pod \"c869998b-76f5-409d-9ff4-4abe3f7c9289\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.994979 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-erlang-cookie\") pod \"c869998b-76f5-409d-9ff4-4abe3f7c9289\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.995015 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-config-data\") pod \"c869998b-76f5-409d-9ff4-4abe3f7c9289\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.995058 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-confd\") pod \"c869998b-76f5-409d-9ff4-4abe3f7c9289\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.995168 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-plugins-conf\") pod \"c869998b-76f5-409d-9ff4-4abe3f7c9289\" (UID: 
\"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.995221 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-tls\") pod \"c869998b-76f5-409d-9ff4-4abe3f7c9289\" (UID: \"c869998b-76f5-409d-9ff4-4abe3f7c9289\") " Oct 13 13:27:01 crc kubenswrapper[4684]: I1013 13:27:01.998390 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "c869998b-76f5-409d-9ff4-4abe3f7c9289" (UID: "c869998b-76f5-409d-9ff4-4abe3f7c9289"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.000196 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "c869998b-76f5-409d-9ff4-4abe3f7c9289" (UID: "c869998b-76f5-409d-9ff4-4abe3f7c9289"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.001896 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "c869998b-76f5-409d-9ff4-4abe3f7c9289" (UID: "c869998b-76f5-409d-9ff4-4abe3f7c9289"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.002946 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "c869998b-76f5-409d-9ff4-4abe3f7c9289" (UID: "c869998b-76f5-409d-9ff4-4abe3f7c9289"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.007636 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-kube-api-access-rkq8m" (OuterVolumeSpecName: "kube-api-access-rkq8m") pod "c869998b-76f5-409d-9ff4-4abe3f7c9289" (UID: "c869998b-76f5-409d-9ff4-4abe3f7c9289"). InnerVolumeSpecName "kube-api-access-rkq8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.007957 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/c869998b-76f5-409d-9ff4-4abe3f7c9289-pod-info" (OuterVolumeSpecName: "pod-info") pod "c869998b-76f5-409d-9ff4-4abe3f7c9289" (UID: "c869998b-76f5-409d-9ff4-4abe3f7c9289"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.012230 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c869998b-76f5-409d-9ff4-4abe3f7c9289-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "c869998b-76f5-409d-9ff4-4abe3f7c9289" (UID: "c869998b-76f5-409d-9ff4-4abe3f7c9289"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.013682 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "c869998b-76f5-409d-9ff4-4abe3f7c9289" (UID: "c869998b-76f5-409d-9ff4-4abe3f7c9289"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.038566 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-config-data" (OuterVolumeSpecName: "config-data") pod "c869998b-76f5-409d-9ff4-4abe3f7c9289" (UID: "c869998b-76f5-409d-9ff4-4abe3f7c9289"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.065968 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-server-conf" (OuterVolumeSpecName: "server-conf") pod "c869998b-76f5-409d-9ff4-4abe3f7c9289" (UID: "c869998b-76f5-409d-9ff4-4abe3f7c9289"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.101104 4684 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.101136 4684 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.101145 4684 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c869998b-76f5-409d-9ff4-4abe3f7c9289-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.101154 4684 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.101162 4684 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-server-conf\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.101171 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkq8m\" (UniqueName: \"kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-kube-api-access-rkq8m\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.101200 4684 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.101210 4684 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c869998b-76f5-409d-9ff4-4abe3f7c9289-pod-info\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.101219 
4684 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.101241 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c869998b-76f5-409d-9ff4-4abe3f7c9289-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.109524 4684 generic.go:334] "Generic (PLEG): container finished" podID="c869998b-76f5-409d-9ff4-4abe3f7c9289" containerID="4bcb76d0b48c055f353c0a74a0c9468ce22a179a84be052dbd887ed5949b8f59" exitCode=0 Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.109586 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.109592 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c869998b-76f5-409d-9ff4-4abe3f7c9289","Type":"ContainerDied","Data":"4bcb76d0b48c055f353c0a74a0c9468ce22a179a84be052dbd887ed5949b8f59"} Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.109650 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c869998b-76f5-409d-9ff4-4abe3f7c9289","Type":"ContainerDied","Data":"30d11a503f43caaa38147a704515c19add3431ff78c64105d56a4814b56b9e7e"} Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.109671 4684 scope.go:117] "RemoveContainer" containerID="4bcb76d0b48c055f353c0a74a0c9468ce22a179a84be052dbd887ed5949b8f59" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.123181 4684 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.138270 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "c869998b-76f5-409d-9ff4-4abe3f7c9289" (UID: "c869998b-76f5-409d-9ff4-4abe3f7c9289"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.145167 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.145859 4684 scope.go:117] "RemoveContainer" containerID="1c5c4ba96b7172f7b427be234ebb1240b723c34afd1406b81ab69e47670a3d5b" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.175547 4684 scope.go:117] "RemoveContainer" containerID="4bcb76d0b48c055f353c0a74a0c9468ce22a179a84be052dbd887ed5949b8f59" Oct 13 13:27:02 crc kubenswrapper[4684]: E1013 13:27:02.176429 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bcb76d0b48c055f353c0a74a0c9468ce22a179a84be052dbd887ed5949b8f59\": container with ID starting with 4bcb76d0b48c055f353c0a74a0c9468ce22a179a84be052dbd887ed5949b8f59 not found: ID does not exist" containerID="4bcb76d0b48c055f353c0a74a0c9468ce22a179a84be052dbd887ed5949b8f59" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.176468 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bcb76d0b48c055f353c0a74a0c9468ce22a179a84be052dbd887ed5949b8f59"} err="failed to get container status \"4bcb76d0b48c055f353c0a74a0c9468ce22a179a84be052dbd887ed5949b8f59\": rpc error: code = NotFound desc = could not find container \"4bcb76d0b48c055f353c0a74a0c9468ce22a179a84be052dbd887ed5949b8f59\": container with ID starting with 4bcb76d0b48c055f353c0a74a0c9468ce22a179a84be052dbd887ed5949b8f59 not found: ID does not exist" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.176499 4684 scope.go:117] "RemoveContainer" containerID="1c5c4ba96b7172f7b427be234ebb1240b723c34afd1406b81ab69e47670a3d5b" Oct 13 13:27:02 crc kubenswrapper[4684]: E1013 13:27:02.176846 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c5c4ba96b7172f7b427be234ebb1240b723c34afd1406b81ab69e47670a3d5b\": container with ID starting with 1c5c4ba96b7172f7b427be234ebb1240b723c34afd1406b81ab69e47670a3d5b not found: ID does not exist" containerID="1c5c4ba96b7172f7b427be234ebb1240b723c34afd1406b81ab69e47670a3d5b" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.176874 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c5c4ba96b7172f7b427be234ebb1240b723c34afd1406b81ab69e47670a3d5b"} err="failed to get container status \"1c5c4ba96b7172f7b427be234ebb1240b723c34afd1406b81ab69e47670a3d5b\": rpc error: code = NotFound desc = could not find container \"1c5c4ba96b7172f7b427be234ebb1240b723c34afd1406b81ab69e47670a3d5b\": container with ID starting with 1c5c4ba96b7172f7b427be234ebb1240b723c34afd1406b81ab69e47670a3d5b not found: ID does not exist" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.203277 4684 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.203310 4684 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c869998b-76f5-409d-9ff4-4abe3f7c9289-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.362773 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b29378a-0de1-402e-993a-a83cc3d41b67" 
path="/var/lib/kubelet/pods/1b29378a-0de1-402e-993a-a83cc3d41b67/volumes" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.433345 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.441459 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.462802 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 13 13:27:02 crc kubenswrapper[4684]: E1013 13:27:02.463246 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c869998b-76f5-409d-9ff4-4abe3f7c9289" containerName="rabbitmq" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.463268 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="c869998b-76f5-409d-9ff4-4abe3f7c9289" containerName="rabbitmq" Oct 13 13:27:02 crc kubenswrapper[4684]: E1013 13:27:02.463291 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c869998b-76f5-409d-9ff4-4abe3f7c9289" containerName="setup-container" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.463298 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="c869998b-76f5-409d-9ff4-4abe3f7c9289" containerName="setup-container" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.463529 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="c869998b-76f5-409d-9ff4-4abe3f7c9289" containerName="rabbitmq" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.464551 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.468164 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.468336 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.468719 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.468867 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-vsbcb" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.468892 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.468971 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.469052 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.490379 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.626371 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.626420 4684 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.626453 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.626490 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.626615 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.626688 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.626719 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.626753 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dk9k\" (UniqueName: \"kubernetes.io/projected/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-kube-api-access-8dk9k\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.626848 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.626977 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: 
I1013 13:27:02.627027 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.728877 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.728955 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.729014 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dk9k\" (UniqueName: \"kubernetes.io/projected/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-kube-api-access-8dk9k\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.729035 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.729522 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.729876 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.729622 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.729955 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.730126 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/configmap/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.730355 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.730394 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.730432 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.730454 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.730480 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.730600 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.731390 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.731928 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.734114 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " 
pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.734675 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.742275 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.743055 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.752779 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dk9k\" (UniqueName: \"kubernetes.io/projected/26f3f75b-5e4f-4f4c-b8ec-53352400c7ef-kube-api-access-8dk9k\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.765124 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:02 crc kubenswrapper[4684]: I1013 13:27:02.824964 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.121552 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7db9b5b9-e5be-4555-bed6-2fd9d9159b40","Type":"ContainerStarted","Data":"a04a7c6f7be978e1cf3f3fbf708767ae6349dddf8b8f3057b64342924759e775"} Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.282659 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 13 13:27:03 crc kubenswrapper[4684]: W1013 13:27:03.284745 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26f3f75b_5e4f_4f4c_b8ec_53352400c7ef.slice/crio-de094c9fb3554ef2c3b431ee40185b595577d5d337e5a6698b60fb9b5d41d667 WatchSource:0}: Error finding container de094c9fb3554ef2c3b431ee40185b595577d5d337e5a6698b60fb9b5d41d667: Status 404 returned error can't find the container with id de094c9fb3554ef2c3b431ee40185b595577d5d337e5a6698b60fb9b5d41d667 Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.744140 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5898d8f4cc-s8h6n"] Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.746661 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.748558 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.760766 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5898d8f4cc-s8h6n"] Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.849648 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-ovsdbserver-sb\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.849753 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-dns-swift-storage-0\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.849871 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-dns-svc\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.850669 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-ovsdbserver-nb\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.850770 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-config\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.850838 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zdtf\" (UniqueName: \"kubernetes.io/projected/41e60b37-84bf-498e-926a-26d566392f2b-kube-api-access-6zdtf\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.850857 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-openstack-edpm-ipam\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.952713 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-dns-swift-storage-0\") pod 
\"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.953037 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-dns-svc\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.953090 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-ovsdbserver-nb\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.953122 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-config\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.953153 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zdtf\" (UniqueName: \"kubernetes.io/projected/41e60b37-84bf-498e-926a-26d566392f2b-kube-api-access-6zdtf\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.953169 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-openstack-edpm-ipam\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.953224 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-ovsdbserver-sb\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.967720 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-dns-swift-storage-0\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.967942 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-config\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.967939 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-ovsdbserver-nb\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " 
pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.968117 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-ovsdbserver-sb\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.968131 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-dns-svc\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.968283 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-openstack-edpm-ipam\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:03 crc kubenswrapper[4684]: I1013 13:27:03.976622 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zdtf\" (UniqueName: \"kubernetes.io/projected/41e60b37-84bf-498e-926a-26d566392f2b-kube-api-access-6zdtf\") pod \"dnsmasq-dns-5898d8f4cc-s8h6n\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:04 crc kubenswrapper[4684]: I1013 13:27:04.076710 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:04 crc kubenswrapper[4684]: I1013 13:27:04.142103 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef","Type":"ContainerStarted","Data":"de094c9fb3554ef2c3b431ee40185b595577d5d337e5a6698b60fb9b5d41d667"} Oct 13 13:27:04 crc kubenswrapper[4684]: I1013 13:27:04.144560 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7db9b5b9-e5be-4555-bed6-2fd9d9159b40","Type":"ContainerStarted","Data":"4612ad4d4963093f7378eb63fa99d0f913f8d5b832d092aa2cd45a0ac80ff355"} Oct 13 13:27:04 crc kubenswrapper[4684]: I1013 13:27:04.363047 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c869998b-76f5-409d-9ff4-4abe3f7c9289" path="/var/lib/kubelet/pods/c869998b-76f5-409d-9ff4-4abe3f7c9289/volumes" Oct 13 13:27:04 crc kubenswrapper[4684]: I1013 13:27:04.557605 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5898d8f4cc-s8h6n"] Oct 13 13:27:05 crc kubenswrapper[4684]: I1013 13:27:05.157786 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" event={"ID":"41e60b37-84bf-498e-926a-26d566392f2b","Type":"ContainerStarted","Data":"9ca95f098d509a339daa91ebf629638e4e037fd05986af48001e4dbec6051b92"} Oct 13 13:27:05 crc kubenswrapper[4684]: I1013 13:27:05.158431 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" event={"ID":"41e60b37-84bf-498e-926a-26d566392f2b","Type":"ContainerStarted","Data":"a0cde8b31532f0557375b0e3295eb77a6e22dcb84268cd4facb10a9fad35707b"} Oct 13 13:27:05 crc kubenswrapper[4684]: I1013 13:27:05.160171 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/rabbitmq-cell1-server-0" event={"ID":"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef","Type":"ContainerStarted","Data":"5f20241883288ad7afc7d60960c800753aaeb0c6d36f2e952a0bacad17ba288a"} Oct 13 13:27:06 crc kubenswrapper[4684]: I1013 13:27:06.169508 4684 generic.go:334] "Generic (PLEG): container finished" podID="41e60b37-84bf-498e-926a-26d566392f2b" containerID="9ca95f098d509a339daa91ebf629638e4e037fd05986af48001e4dbec6051b92" exitCode=0 Oct 13 13:27:06 crc kubenswrapper[4684]: I1013 13:27:06.170028 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" event={"ID":"41e60b37-84bf-498e-926a-26d566392f2b","Type":"ContainerDied","Data":"9ca95f098d509a339daa91ebf629638e4e037fd05986af48001e4dbec6051b92"} Oct 13 13:27:07 crc kubenswrapper[4684]: I1013 13:27:07.180357 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" event={"ID":"41e60b37-84bf-498e-926a-26d566392f2b","Type":"ContainerStarted","Data":"c00e65ee82c11d202ac7d24aa452695256b63dbd8981e9e1a1b6de2450fabe7e"} Oct 13 13:27:07 crc kubenswrapper[4684]: I1013 13:27:07.181279 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:07 crc kubenswrapper[4684]: I1013 13:27:07.209639 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" podStartSLOduration=4.209614375 podStartE2EDuration="4.209614375s" podCreationTimestamp="2025-10-13 13:27:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:27:07.198659802 +0000 UTC m=+1181.766043902" watchObservedRunningTime="2025-10-13 13:27:07.209614375 +0000 UTC m=+1181.776998455" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.078085 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.168581 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54556fbfd5-lzq42"] Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.240984 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" podUID="1992e4de-4eef-4025-829e-aa304f4ee7da" containerName="dnsmasq-dns" containerID="cri-o://c07573ccd7517763d40b19ea3371788af0f18efc17f8ad3997ce371ec35f1d9b" gracePeriod=10 Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.281993 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67567868d9-g9dwp"] Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.283826 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.304756 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67567868d9-g9dwp"] Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.358936 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mslsv\" (UniqueName: \"kubernetes.io/projected/c04a3f8c-5d8c-4c33-8964-e31de4003949-kube-api-access-mslsv\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.358998 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-config\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.359035 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-dns-svc\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.359194 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-openstack-edpm-ipam\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.359364 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-ovsdbserver-nb\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.359396 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-ovsdbserver-sb\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.359468 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-dns-swift-storage-0\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.460652 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-dns-swift-storage-0\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 
13:27:14.460732 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mslsv\" (UniqueName: \"kubernetes.io/projected/c04a3f8c-5d8c-4c33-8964-e31de4003949-kube-api-access-mslsv\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.460765 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-config\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.460781 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-dns-svc\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.460800 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-openstack-edpm-ipam\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.460848 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-ovsdbserver-nb\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.460864 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-ovsdbserver-sb\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.461655 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-ovsdbserver-sb\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.462222 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-dns-swift-storage-0\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.463272 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-dns-svc\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.463769 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-config\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.464224 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-ovsdbserver-nb\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.465245 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c04a3f8c-5d8c-4c33-8964-e31de4003949-openstack-edpm-ipam\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.497813 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mslsv\" (UniqueName: \"kubernetes.io/projected/c04a3f8c-5d8c-4c33-8964-e31de4003949-kube-api-access-mslsv\") pod \"dnsmasq-dns-67567868d9-g9dwp\" (UID: \"c04a3f8c-5d8c-4c33-8964-e31de4003949\") " pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.613672 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.747399 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.873073 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-ovsdbserver-nb\") pod \"1992e4de-4eef-4025-829e-aa304f4ee7da\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.873189 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-ovsdbserver-sb\") pod \"1992e4de-4eef-4025-829e-aa304f4ee7da\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.873230 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-dns-swift-storage-0\") pod \"1992e4de-4eef-4025-829e-aa304f4ee7da\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.873282 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-config\") pod \"1992e4de-4eef-4025-829e-aa304f4ee7da\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.873321 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krwk4\" (UniqueName: \"kubernetes.io/projected/1992e4de-4eef-4025-829e-aa304f4ee7da-kube-api-access-krwk4\") pod \"1992e4de-4eef-4025-829e-aa304f4ee7da\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " 
Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.873391 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-dns-svc\") pod \"1992e4de-4eef-4025-829e-aa304f4ee7da\" (UID: \"1992e4de-4eef-4025-829e-aa304f4ee7da\") " Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.878736 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1992e4de-4eef-4025-829e-aa304f4ee7da-kube-api-access-krwk4" (OuterVolumeSpecName: "kube-api-access-krwk4") pod "1992e4de-4eef-4025-829e-aa304f4ee7da" (UID: "1992e4de-4eef-4025-829e-aa304f4ee7da"). InnerVolumeSpecName "kube-api-access-krwk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.921252 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1992e4de-4eef-4025-829e-aa304f4ee7da" (UID: "1992e4de-4eef-4025-829e-aa304f4ee7da"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.931479 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1992e4de-4eef-4025-829e-aa304f4ee7da" (UID: "1992e4de-4eef-4025-829e-aa304f4ee7da"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.937775 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1992e4de-4eef-4025-829e-aa304f4ee7da" (UID: "1992e4de-4eef-4025-829e-aa304f4ee7da"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.939711 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1992e4de-4eef-4025-829e-aa304f4ee7da" (UID: "1992e4de-4eef-4025-829e-aa304f4ee7da"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.942074 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-config" (OuterVolumeSpecName: "config") pod "1992e4de-4eef-4025-829e-aa304f4ee7da" (UID: "1992e4de-4eef-4025-829e-aa304f4ee7da"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.975365 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.975399 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.975411 4684 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.975425 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.975436 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krwk4\" (UniqueName: \"kubernetes.io/projected/1992e4de-4eef-4025-829e-aa304f4ee7da-kube-api-access-krwk4\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:14 crc kubenswrapper[4684]: I1013 13:27:14.975449 4684 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1992e4de-4eef-4025-829e-aa304f4ee7da-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:15 crc kubenswrapper[4684]: I1013 13:27:15.104359 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67567868d9-g9dwp"] Oct 13 13:27:15 crc kubenswrapper[4684]: I1013 13:27:15.256047 4684 generic.go:334] "Generic (PLEG): container finished" podID="1992e4de-4eef-4025-829e-aa304f4ee7da" containerID="c07573ccd7517763d40b19ea3371788af0f18efc17f8ad3997ce371ec35f1d9b" exitCode=0 Oct 13 13:27:15 crc kubenswrapper[4684]: I1013 13:27:15.256118 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" event={"ID":"1992e4de-4eef-4025-829e-aa304f4ee7da","Type":"ContainerDied","Data":"c07573ccd7517763d40b19ea3371788af0f18efc17f8ad3997ce371ec35f1d9b"} Oct 13 13:27:15 crc kubenswrapper[4684]: I1013 13:27:15.256149 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" event={"ID":"1992e4de-4eef-4025-829e-aa304f4ee7da","Type":"ContainerDied","Data":"0242dcbe1f096536a50116af9e29882e5252bf50204771471e4e50dd71d2fd27"} Oct 13 13:27:15 crc kubenswrapper[4684]: I1013 13:27:15.256169 4684 scope.go:117] "RemoveContainer" containerID="c07573ccd7517763d40b19ea3371788af0f18efc17f8ad3997ce371ec35f1d9b" Oct 13 13:27:15 crc kubenswrapper[4684]: I1013 13:27:15.256297 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54556fbfd5-lzq42" Oct 13 13:27:15 crc kubenswrapper[4684]: I1013 13:27:15.264652 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67567868d9-g9dwp" event={"ID":"c04a3f8c-5d8c-4c33-8964-e31de4003949","Type":"ContainerStarted","Data":"4bb97533cedd2e4bd34b7462ad5993b6dfe421c6e056fe3d2579b73de87a567b"} Oct 13 13:27:15 crc kubenswrapper[4684]: I1013 13:27:15.296428 4684 scope.go:117] "RemoveContainer" containerID="e2e44e9167e679e2e20e58bd3a28bab4ba19843bf331f8cf4290108cb9824184" Oct 13 13:27:15 crc kubenswrapper[4684]: I1013 13:27:15.300762 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54556fbfd5-lzq42"] Oct 13 13:27:15 crc kubenswrapper[4684]: I1013 13:27:15.308647 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-54556fbfd5-lzq42"] Oct 13 13:27:15 crc kubenswrapper[4684]: I1013 13:27:15.319542 4684 scope.go:117] "RemoveContainer" containerID="c07573ccd7517763d40b19ea3371788af0f18efc17f8ad3997ce371ec35f1d9b" Oct 13 13:27:15 crc kubenswrapper[4684]: E1013 13:27:15.320095 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c07573ccd7517763d40b19ea3371788af0f18efc17f8ad3997ce371ec35f1d9b\": container with ID starting with c07573ccd7517763d40b19ea3371788af0f18efc17f8ad3997ce371ec35f1d9b not found: ID does not exist" containerID="c07573ccd7517763d40b19ea3371788af0f18efc17f8ad3997ce371ec35f1d9b" Oct 13 13:27:15 crc kubenswrapper[4684]: I1013 13:27:15.320149 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c07573ccd7517763d40b19ea3371788af0f18efc17f8ad3997ce371ec35f1d9b"} err="failed to get container status \"c07573ccd7517763d40b19ea3371788af0f18efc17f8ad3997ce371ec35f1d9b\": rpc error: code = NotFound desc = could not find container \"c07573ccd7517763d40b19ea3371788af0f18efc17f8ad3997ce371ec35f1d9b\": container with ID starting with c07573ccd7517763d40b19ea3371788af0f18efc17f8ad3997ce371ec35f1d9b not found: ID does not exist" Oct 13 13:27:15 crc kubenswrapper[4684]: I1013 13:27:15.320181 4684 scope.go:117] "RemoveContainer" containerID="e2e44e9167e679e2e20e58bd3a28bab4ba19843bf331f8cf4290108cb9824184" Oct 13 13:27:15 crc kubenswrapper[4684]: E1013 13:27:15.320710 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2e44e9167e679e2e20e58bd3a28bab4ba19843bf331f8cf4290108cb9824184\": container with ID starting with e2e44e9167e679e2e20e58bd3a28bab4ba19843bf331f8cf4290108cb9824184 not found: ID does not exist" containerID="e2e44e9167e679e2e20e58bd3a28bab4ba19843bf331f8cf4290108cb9824184" Oct 13 13:27:15 crc kubenswrapper[4684]: I1013 13:27:15.320737 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2e44e9167e679e2e20e58bd3a28bab4ba19843bf331f8cf4290108cb9824184"} err="failed to get container status \"e2e44e9167e679e2e20e58bd3a28bab4ba19843bf331f8cf4290108cb9824184\": rpc error: code = NotFound desc = could not find container \"e2e44e9167e679e2e20e58bd3a28bab4ba19843bf331f8cf4290108cb9824184\": container with ID starting with e2e44e9167e679e2e20e58bd3a28bab4ba19843bf331f8cf4290108cb9824184 not found: ID does not exist" Oct 13 13:27:16 crc kubenswrapper[4684]: I1013 13:27:16.276094 4684 generic.go:334] "Generic (PLEG): container finished" podID="c04a3f8c-5d8c-4c33-8964-e31de4003949" 
containerID="11e60bf90ebcee1b8d8733e848c11d5e567ab475d88f65e542b762dc84ff1261" exitCode=0 Oct 13 13:27:16 crc kubenswrapper[4684]: I1013 13:27:16.276280 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67567868d9-g9dwp" event={"ID":"c04a3f8c-5d8c-4c33-8964-e31de4003949","Type":"ContainerDied","Data":"11e60bf90ebcee1b8d8733e848c11d5e567ab475d88f65e542b762dc84ff1261"} Oct 13 13:27:16 crc kubenswrapper[4684]: I1013 13:27:16.364395 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1992e4de-4eef-4025-829e-aa304f4ee7da" path="/var/lib/kubelet/pods/1992e4de-4eef-4025-829e-aa304f4ee7da/volumes" Oct 13 13:27:17 crc kubenswrapper[4684]: I1013 13:27:17.286484 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67567868d9-g9dwp" event={"ID":"c04a3f8c-5d8c-4c33-8964-e31de4003949","Type":"ContainerStarted","Data":"8099798acb3849023517b9e84b673c262726eed8bee87b2012d1157fa99e3027"} Oct 13 13:27:17 crc kubenswrapper[4684]: I1013 13:27:17.287107 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:17 crc kubenswrapper[4684]: I1013 13:27:17.305568 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67567868d9-g9dwp" podStartSLOduration=3.305547459 podStartE2EDuration="3.305547459s" podCreationTimestamp="2025-10-13 13:27:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:27:17.302491653 +0000 UTC m=+1191.869875753" watchObservedRunningTime="2025-10-13 13:27:17.305547459 +0000 UTC m=+1191.872931529" Oct 13 13:27:24 crc kubenswrapper[4684]: I1013 13:27:24.616081 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67567868d9-g9dwp" Oct 13 13:27:24 crc kubenswrapper[4684]: I1013 13:27:24.683501 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5898d8f4cc-s8h6n"] Oct 13 13:27:24 crc kubenswrapper[4684]: I1013 13:27:24.684186 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" podUID="41e60b37-84bf-498e-926a-26d566392f2b" containerName="dnsmasq-dns" containerID="cri-o://c00e65ee82c11d202ac7d24aa452695256b63dbd8981e9e1a1b6de2450fabe7e" gracePeriod=10 Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.145314 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.287749 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-ovsdbserver-nb\") pod \"41e60b37-84bf-498e-926a-26d566392f2b\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.287878 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-openstack-edpm-ipam\") pod \"41e60b37-84bf-498e-926a-26d566392f2b\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.287926 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-config\") pod \"41e60b37-84bf-498e-926a-26d566392f2b\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.287946 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zdtf\" (UniqueName: \"kubernetes.io/projected/41e60b37-84bf-498e-926a-26d566392f2b-kube-api-access-6zdtf\") pod \"41e60b37-84bf-498e-926a-26d566392f2b\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.287967 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-ovsdbserver-sb\") pod \"41e60b37-84bf-498e-926a-26d566392f2b\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.288019 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-dns-swift-storage-0\") pod \"41e60b37-84bf-498e-926a-26d566392f2b\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.288107 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-dns-svc\") pod \"41e60b37-84bf-498e-926a-26d566392f2b\" (UID: \"41e60b37-84bf-498e-926a-26d566392f2b\") " Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.295456 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41e60b37-84bf-498e-926a-26d566392f2b-kube-api-access-6zdtf" (OuterVolumeSpecName: "kube-api-access-6zdtf") pod "41e60b37-84bf-498e-926a-26d566392f2b" (UID: "41e60b37-84bf-498e-926a-26d566392f2b"). InnerVolumeSpecName "kube-api-access-6zdtf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.350632 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "41e60b37-84bf-498e-926a-26d566392f2b" (UID: "41e60b37-84bf-498e-926a-26d566392f2b"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.352052 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "41e60b37-84bf-498e-926a-26d566392f2b" (UID: "41e60b37-84bf-498e-926a-26d566392f2b"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.358003 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "41e60b37-84bf-498e-926a-26d566392f2b" (UID: "41e60b37-84bf-498e-926a-26d566392f2b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.358145 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "41e60b37-84bf-498e-926a-26d566392f2b" (UID: "41e60b37-84bf-498e-926a-26d566392f2b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.361353 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-config" (OuterVolumeSpecName: "config") pod "41e60b37-84bf-498e-926a-26d566392f2b" (UID: "41e60b37-84bf-498e-926a-26d566392f2b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.368272 4684 generic.go:334] "Generic (PLEG): container finished" podID="41e60b37-84bf-498e-926a-26d566392f2b" containerID="c00e65ee82c11d202ac7d24aa452695256b63dbd8981e9e1a1b6de2450fabe7e" exitCode=0 Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.368445 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" event={"ID":"41e60b37-84bf-498e-926a-26d566392f2b","Type":"ContainerDied","Data":"c00e65ee82c11d202ac7d24aa452695256b63dbd8981e9e1a1b6de2450fabe7e"} Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.368556 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" event={"ID":"41e60b37-84bf-498e-926a-26d566392f2b","Type":"ContainerDied","Data":"a0cde8b31532f0557375b0e3295eb77a6e22dcb84268cd4facb10a9fad35707b"} Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.368628 4684 scope.go:117] "RemoveContainer" containerID="c00e65ee82c11d202ac7d24aa452695256b63dbd8981e9e1a1b6de2450fabe7e" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.368818 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5898d8f4cc-s8h6n" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.381173 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "41e60b37-84bf-498e-926a-26d566392f2b" (UID: "41e60b37-84bf-498e-926a-26d566392f2b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.394281 4684 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.394322 4684 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-config\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.394335 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zdtf\" (UniqueName: \"kubernetes.io/projected/41e60b37-84bf-498e-926a-26d566392f2b-kube-api-access-6zdtf\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.394387 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.394400 4684 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.394411 4684 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.394422 4684 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41e60b37-84bf-498e-926a-26d566392f2b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.427826 4684 scope.go:117] "RemoveContainer" containerID="9ca95f098d509a339daa91ebf629638e4e037fd05986af48001e4dbec6051b92" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.455151 4684 scope.go:117] "RemoveContainer" containerID="c00e65ee82c11d202ac7d24aa452695256b63dbd8981e9e1a1b6de2450fabe7e" Oct 13 13:27:25 crc kubenswrapper[4684]: E1013 13:27:25.455585 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c00e65ee82c11d202ac7d24aa452695256b63dbd8981e9e1a1b6de2450fabe7e\": container with ID starting with c00e65ee82c11d202ac7d24aa452695256b63dbd8981e9e1a1b6de2450fabe7e not found: ID does not exist" containerID="c00e65ee82c11d202ac7d24aa452695256b63dbd8981e9e1a1b6de2450fabe7e" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.455630 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c00e65ee82c11d202ac7d24aa452695256b63dbd8981e9e1a1b6de2450fabe7e"} err="failed to get container status \"c00e65ee82c11d202ac7d24aa452695256b63dbd8981e9e1a1b6de2450fabe7e\": rpc error: code = NotFound desc = could not find container \"c00e65ee82c11d202ac7d24aa452695256b63dbd8981e9e1a1b6de2450fabe7e\": container with ID starting with c00e65ee82c11d202ac7d24aa452695256b63dbd8981e9e1a1b6de2450fabe7e not found: ID does not exist" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.455656 4684 scope.go:117] "RemoveContainer" containerID="9ca95f098d509a339daa91ebf629638e4e037fd05986af48001e4dbec6051b92" Oct 13 13:27:25 crc 
kubenswrapper[4684]: E1013 13:27:25.456087 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ca95f098d509a339daa91ebf629638e4e037fd05986af48001e4dbec6051b92\": container with ID starting with 9ca95f098d509a339daa91ebf629638e4e037fd05986af48001e4dbec6051b92 not found: ID does not exist" containerID="9ca95f098d509a339daa91ebf629638e4e037fd05986af48001e4dbec6051b92" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.456119 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ca95f098d509a339daa91ebf629638e4e037fd05986af48001e4dbec6051b92"} err="failed to get container status \"9ca95f098d509a339daa91ebf629638e4e037fd05986af48001e4dbec6051b92\": rpc error: code = NotFound desc = could not find container \"9ca95f098d509a339daa91ebf629638e4e037fd05986af48001e4dbec6051b92\": container with ID starting with 9ca95f098d509a339daa91ebf629638e4e037fd05986af48001e4dbec6051b92 not found: ID does not exist" Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.710853 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5898d8f4cc-s8h6n"] Oct 13 13:27:25 crc kubenswrapper[4684]: I1013 13:27:25.720532 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5898d8f4cc-s8h6n"] Oct 13 13:27:26 crc kubenswrapper[4684]: I1013 13:27:26.360975 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41e60b37-84bf-498e-926a-26d566392f2b" path="/var/lib/kubelet/pods/41e60b37-84bf-498e-926a-26d566392f2b/volumes" Oct 13 13:27:36 crc kubenswrapper[4684]: I1013 13:27:36.472138 4684 generic.go:334] "Generic (PLEG): container finished" podID="7db9b5b9-e5be-4555-bed6-2fd9d9159b40" containerID="4612ad4d4963093f7378eb63fa99d0f913f8d5b832d092aa2cd45a0ac80ff355" exitCode=0 Oct 13 13:27:36 crc kubenswrapper[4684]: I1013 13:27:36.472230 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7db9b5b9-e5be-4555-bed6-2fd9d9159b40","Type":"ContainerDied","Data":"4612ad4d4963093f7378eb63fa99d0f913f8d5b832d092aa2cd45a0ac80ff355"} Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.485292 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7db9b5b9-e5be-4555-bed6-2fd9d9159b40","Type":"ContainerStarted","Data":"9b266b8e672749919e79add7e9fa49b4e2588db341749dd52c56490102129345"} Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.486077 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.486810 4684 generic.go:334] "Generic (PLEG): container finished" podID="26f3f75b-5e4f-4f4c-b8ec-53352400c7ef" containerID="5f20241883288ad7afc7d60960c800753aaeb0c6d36f2e952a0bacad17ba288a" exitCode=0 Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.486838 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef","Type":"ContainerDied","Data":"5f20241883288ad7afc7d60960c800753aaeb0c6d36f2e952a0bacad17ba288a"} Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.523807 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.523755618 podStartE2EDuration="36.523755618s" podCreationTimestamp="2025-10-13 13:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:27:37.511081961 +0000 UTC m=+1212.078466041" watchObservedRunningTime="2025-10-13 13:27:37.523755618 +0000 UTC m=+1212.091139688" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.630063 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt"] Oct 13 13:27:37 crc kubenswrapper[4684]: E1013 13:27:37.630416 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41e60b37-84bf-498e-926a-26d566392f2b" containerName="init" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.630431 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="41e60b37-84bf-498e-926a-26d566392f2b" containerName="init" Oct 13 13:27:37 crc kubenswrapper[4684]: E1013 13:27:37.630446 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41e60b37-84bf-498e-926a-26d566392f2b" containerName="dnsmasq-dns" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.630453 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="41e60b37-84bf-498e-926a-26d566392f2b" containerName="dnsmasq-dns" Oct 13 13:27:37 crc kubenswrapper[4684]: E1013 13:27:37.630470 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1992e4de-4eef-4025-829e-aa304f4ee7da" containerName="dnsmasq-dns" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.630476 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="1992e4de-4eef-4025-829e-aa304f4ee7da" containerName="dnsmasq-dns" Oct 13 13:27:37 crc kubenswrapper[4684]: E1013 13:27:37.630492 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1992e4de-4eef-4025-829e-aa304f4ee7da" containerName="init" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.630498 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="1992e4de-4eef-4025-829e-aa304f4ee7da" containerName="init" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.630666 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="41e60b37-84bf-498e-926a-26d566392f2b" containerName="dnsmasq-dns" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.630697 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="1992e4de-4eef-4025-829e-aa304f4ee7da" containerName="dnsmasq-dns" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.635915 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.638638 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.638709 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.639033 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.639154 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.642526 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt"] Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.775837 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6mmb\" (UniqueName: \"kubernetes.io/projected/24b2196a-b383-4c65-9c61-992b7305c6ea-kube-api-access-d6mmb\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt\" (UID: \"24b2196a-b383-4c65-9c61-992b7305c6ea\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.775892 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt\" (UID: \"24b2196a-b383-4c65-9c61-992b7305c6ea\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.776030 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt\" (UID: \"24b2196a-b383-4c65-9c61-992b7305c6ea\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.776176 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt\" (UID: \"24b2196a-b383-4c65-9c61-992b7305c6ea\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.877672 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6mmb\" (UniqueName: \"kubernetes.io/projected/24b2196a-b383-4c65-9c61-992b7305c6ea-kube-api-access-d6mmb\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt\" (UID: \"24b2196a-b383-4c65-9c61-992b7305c6ea\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.877719 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt\" (UID: \"24b2196a-b383-4c65-9c61-992b7305c6ea\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.877751 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt\" (UID: \"24b2196a-b383-4c65-9c61-992b7305c6ea\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.877801 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt\" (UID: \"24b2196a-b383-4c65-9c61-992b7305c6ea\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.885164 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt\" (UID: \"24b2196a-b383-4c65-9c61-992b7305c6ea\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.885210 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt\" (UID: \"24b2196a-b383-4c65-9c61-992b7305c6ea\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.887743 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt\" (UID: \"24b2196a-b383-4c65-9c61-992b7305c6ea\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.894793 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6mmb\" (UniqueName: \"kubernetes.io/projected/24b2196a-b383-4c65-9c61-992b7305c6ea-kube-api-access-d6mmb\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt\" (UID: \"24b2196a-b383-4c65-9c61-992b7305c6ea\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" Oct 13 13:27:37 crc kubenswrapper[4684]: I1013 13:27:37.995243 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" Oct 13 13:27:38 crc kubenswrapper[4684]: I1013 13:27:38.497787 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"26f3f75b-5e4f-4f4c-b8ec-53352400c7ef","Type":"ContainerStarted","Data":"289d6416dccc7e38e35161641fd97b901e85713d3827c6834ae040c8575b0014"} Oct 13 13:27:38 crc kubenswrapper[4684]: I1013 13:27:38.499120 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:38 crc kubenswrapper[4684]: I1013 13:27:38.522998 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.522976708 podStartE2EDuration="36.522976708s" podCreationTimestamp="2025-10-13 13:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 13:27:38.521964397 +0000 UTC m=+1213.089348487" watchObservedRunningTime="2025-10-13 13:27:38.522976708 +0000 UTC m=+1213.090360788" Oct 13 13:27:38 crc kubenswrapper[4684]: I1013 13:27:38.599669 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt"] Oct 13 13:27:38 crc kubenswrapper[4684]: W1013 13:27:38.604999 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod24b2196a_b383_4c65_9c61_992b7305c6ea.slice/crio-a4ee94175eff1f6c93d9ef45c5df352fdde48c79ea7b5daa5cee227f83d3f6b8 WatchSource:0}: Error finding container a4ee94175eff1f6c93d9ef45c5df352fdde48c79ea7b5daa5cee227f83d3f6b8: Status 404 returned error can't find the container with id a4ee94175eff1f6c93d9ef45c5df352fdde48c79ea7b5daa5cee227f83d3f6b8 Oct 13 13:27:38 crc kubenswrapper[4684]: I1013 13:27:38.608083 4684 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 13 13:27:39 crc kubenswrapper[4684]: I1013 13:27:39.518789 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" event={"ID":"24b2196a-b383-4c65-9c61-992b7305c6ea","Type":"ContainerStarted","Data":"a4ee94175eff1f6c93d9ef45c5df352fdde48c79ea7b5daa5cee227f83d3f6b8"} Oct 13 13:27:48 crc kubenswrapper[4684]: I1013 13:27:48.600723 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" event={"ID":"24b2196a-b383-4c65-9c61-992b7305c6ea","Type":"ContainerStarted","Data":"ad914a46a813bd87b70ceb6c506885b9fba420700576a7f7f0708ca00efbd996"} Oct 13 13:27:48 crc kubenswrapper[4684]: I1013 13:27:48.626235 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" podStartSLOduration=2.226518876 podStartE2EDuration="11.62621278s" podCreationTimestamp="2025-10-13 13:27:37 +0000 UTC" firstStartedPulling="2025-10-13 13:27:38.607723059 +0000 UTC m=+1213.175107129" lastFinishedPulling="2025-10-13 13:27:48.007416963 +0000 UTC m=+1222.574801033" observedRunningTime="2025-10-13 13:27:48.619264202 +0000 UTC m=+1223.186648302" watchObservedRunningTime="2025-10-13 13:27:48.62621278 +0000 UTC m=+1223.193596860" Oct 13 13:27:51 crc kubenswrapper[4684]: I1013 13:27:51.563047 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 13 13:27:52 crc 
kubenswrapper[4684]: I1013 13:27:52.828111 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 13 13:27:59 crc kubenswrapper[4684]: I1013 13:27:59.728549 4684 generic.go:334] "Generic (PLEG): container finished" podID="24b2196a-b383-4c65-9c61-992b7305c6ea" containerID="ad914a46a813bd87b70ceb6c506885b9fba420700576a7f7f0708ca00efbd996" exitCode=0 Oct 13 13:27:59 crc kubenswrapper[4684]: I1013 13:27:59.728642 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" event={"ID":"24b2196a-b383-4c65-9c61-992b7305c6ea","Type":"ContainerDied","Data":"ad914a46a813bd87b70ceb6c506885b9fba420700576a7f7f0708ca00efbd996"} Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.173665 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.327931 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6mmb\" (UniqueName: \"kubernetes.io/projected/24b2196a-b383-4c65-9c61-992b7305c6ea-kube-api-access-d6mmb\") pod \"24b2196a-b383-4c65-9c61-992b7305c6ea\" (UID: \"24b2196a-b383-4c65-9c61-992b7305c6ea\") " Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.327998 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-repo-setup-combined-ca-bundle\") pod \"24b2196a-b383-4c65-9c61-992b7305c6ea\" (UID: \"24b2196a-b383-4c65-9c61-992b7305c6ea\") " Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.328017 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-ssh-key\") pod \"24b2196a-b383-4c65-9c61-992b7305c6ea\" (UID: \"24b2196a-b383-4c65-9c61-992b7305c6ea\") " Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.328130 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-inventory\") pod \"24b2196a-b383-4c65-9c61-992b7305c6ea\" (UID: \"24b2196a-b383-4c65-9c61-992b7305c6ea\") " Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.334845 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "24b2196a-b383-4c65-9c61-992b7305c6ea" (UID: "24b2196a-b383-4c65-9c61-992b7305c6ea"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.337581 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24b2196a-b383-4c65-9c61-992b7305c6ea-kube-api-access-d6mmb" (OuterVolumeSpecName: "kube-api-access-d6mmb") pod "24b2196a-b383-4c65-9c61-992b7305c6ea" (UID: "24b2196a-b383-4c65-9c61-992b7305c6ea"). InnerVolumeSpecName "kube-api-access-d6mmb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.358045 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "24b2196a-b383-4c65-9c61-992b7305c6ea" (UID: "24b2196a-b383-4c65-9c61-992b7305c6ea"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.360689 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-inventory" (OuterVolumeSpecName: "inventory") pod "24b2196a-b383-4c65-9c61-992b7305c6ea" (UID: "24b2196a-b383-4c65-9c61-992b7305c6ea"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.430028 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.430065 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6mmb\" (UniqueName: \"kubernetes.io/projected/24b2196a-b383-4c65-9c61-992b7305c6ea-kube-api-access-d6mmb\") on node \"crc\" DevicePath \"\"" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.430075 4684 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.430086 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/24b2196a-b383-4c65-9c61-992b7305c6ea-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.749653 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" event={"ID":"24b2196a-b383-4c65-9c61-992b7305c6ea","Type":"ContainerDied","Data":"a4ee94175eff1f6c93d9ef45c5df352fdde48c79ea7b5daa5cee227f83d3f6b8"} Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.749948 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4ee94175eff1f6c93d9ef45c5df352fdde48c79ea7b5daa5cee227f83d3f6b8" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.750057 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.826556 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6"] Oct 13 13:28:01 crc kubenswrapper[4684]: E1013 13:28:01.826996 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24b2196a-b383-4c65-9c61-992b7305c6ea" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.827020 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="24b2196a-b383-4c65-9c61-992b7305c6ea" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.827500 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="24b2196a-b383-4c65-9c61-992b7305c6ea" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.828255 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.831696 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.831727 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.831774 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.833676 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.869356 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6"] Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.937886 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dcvr6\" (UID: \"3af107fc-1ddc-4c90-80c1-c3fed25bddcb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.938311 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dcvr6\" (UID: \"3af107fc-1ddc-4c90-80c1-c3fed25bddcb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" Oct 13 13:28:01 crc kubenswrapper[4684]: I1013 13:28:01.938335 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhfpn\" (UniqueName: \"kubernetes.io/projected/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-kube-api-access-bhfpn\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dcvr6\" (UID: \"3af107fc-1ddc-4c90-80c1-c3fed25bddcb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" Oct 13 13:28:02 crc kubenswrapper[4684]: I1013 13:28:02.040400 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dcvr6\" (UID: \"3af107fc-1ddc-4c90-80c1-c3fed25bddcb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" Oct 13 13:28:02 crc kubenswrapper[4684]: I1013 13:28:02.040455 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhfpn\" (UniqueName: \"kubernetes.io/projected/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-kube-api-access-bhfpn\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dcvr6\" (UID: \"3af107fc-1ddc-4c90-80c1-c3fed25bddcb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" Oct 13 13:28:02 crc kubenswrapper[4684]: I1013 13:28:02.040627 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dcvr6\" (UID: \"3af107fc-1ddc-4c90-80c1-c3fed25bddcb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" Oct 13 13:28:02 crc kubenswrapper[4684]: I1013 13:28:02.046693 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dcvr6\" (UID: \"3af107fc-1ddc-4c90-80c1-c3fed25bddcb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" Oct 13 13:28:02 crc kubenswrapper[4684]: I1013 13:28:02.047403 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dcvr6\" (UID: \"3af107fc-1ddc-4c90-80c1-c3fed25bddcb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" Oct 13 13:28:02 crc kubenswrapper[4684]: I1013 13:28:02.057886 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhfpn\" (UniqueName: \"kubernetes.io/projected/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-kube-api-access-bhfpn\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-dcvr6\" (UID: \"3af107fc-1ddc-4c90-80c1-c3fed25bddcb\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" Oct 13 13:28:02 crc kubenswrapper[4684]: I1013 13:28:02.177798 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" Oct 13 13:28:02 crc kubenswrapper[4684]: I1013 13:28:02.672800 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6"] Oct 13 13:28:02 crc kubenswrapper[4684]: W1013 13:28:02.676287 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3af107fc_1ddc_4c90_80c1_c3fed25bddcb.slice/crio-b49d06441df7ed5da09c3a6c09adaef20f2e2e2461c196b7a8a5b215ce75805a WatchSource:0}: Error finding container b49d06441df7ed5da09c3a6c09adaef20f2e2e2461c196b7a8a5b215ce75805a: Status 404 returned error can't find the container with id b49d06441df7ed5da09c3a6c09adaef20f2e2e2461c196b7a8a5b215ce75805a Oct 13 13:28:02 crc kubenswrapper[4684]: I1013 13:28:02.770810 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" event={"ID":"3af107fc-1ddc-4c90-80c1-c3fed25bddcb","Type":"ContainerStarted","Data":"b49d06441df7ed5da09c3a6c09adaef20f2e2e2461c196b7a8a5b215ce75805a"} Oct 13 13:28:03 crc kubenswrapper[4684]: I1013 13:28:03.784622 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" event={"ID":"3af107fc-1ddc-4c90-80c1-c3fed25bddcb","Type":"ContainerStarted","Data":"a365fb82e6956fd27711f6d867d4f2f8aab501a9744bac439962835be922bf6a"} Oct 13 13:28:03 crc kubenswrapper[4684]: I1013 13:28:03.812300 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" podStartSLOduration=2.2604704079999998 podStartE2EDuration="2.812280112s" podCreationTimestamp="2025-10-13 13:28:01 +0000 UTC" firstStartedPulling="2025-10-13 13:28:02.678105835 +0000 UTC m=+1237.245489905" lastFinishedPulling="2025-10-13 13:28:03.229915549 +0000 UTC m=+1237.797299609" observedRunningTime="2025-10-13 13:28:03.80395405 +0000 UTC m=+1238.371338130" watchObservedRunningTime="2025-10-13 13:28:03.812280112 +0000 UTC m=+1238.379664192" Oct 13 13:28:06 crc kubenswrapper[4684]: I1013 13:28:06.826357 4684 generic.go:334] "Generic (PLEG): container finished" podID="3af107fc-1ddc-4c90-80c1-c3fed25bddcb" containerID="a365fb82e6956fd27711f6d867d4f2f8aab501a9744bac439962835be922bf6a" exitCode=0 Oct 13 13:28:06 crc kubenswrapper[4684]: I1013 13:28:06.826473 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" event={"ID":"3af107fc-1ddc-4c90-80c1-c3fed25bddcb","Type":"ContainerDied","Data":"a365fb82e6956fd27711f6d867d4f2f8aab501a9744bac439962835be922bf6a"} Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.297016 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.363648 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhfpn\" (UniqueName: \"kubernetes.io/projected/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-kube-api-access-bhfpn\") pod \"3af107fc-1ddc-4c90-80c1-c3fed25bddcb\" (UID: \"3af107fc-1ddc-4c90-80c1-c3fed25bddcb\") " Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.363722 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-ssh-key\") pod \"3af107fc-1ddc-4c90-80c1-c3fed25bddcb\" (UID: \"3af107fc-1ddc-4c90-80c1-c3fed25bddcb\") " Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.363770 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-inventory\") pod \"3af107fc-1ddc-4c90-80c1-c3fed25bddcb\" (UID: \"3af107fc-1ddc-4c90-80c1-c3fed25bddcb\") " Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.369560 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-kube-api-access-bhfpn" (OuterVolumeSpecName: "kube-api-access-bhfpn") pod "3af107fc-1ddc-4c90-80c1-c3fed25bddcb" (UID: "3af107fc-1ddc-4c90-80c1-c3fed25bddcb"). InnerVolumeSpecName "kube-api-access-bhfpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.392151 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3af107fc-1ddc-4c90-80c1-c3fed25bddcb" (UID: "3af107fc-1ddc-4c90-80c1-c3fed25bddcb"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.396514 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-inventory" (OuterVolumeSpecName: "inventory") pod "3af107fc-1ddc-4c90-80c1-c3fed25bddcb" (UID: "3af107fc-1ddc-4c90-80c1-c3fed25bddcb"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.466321 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhfpn\" (UniqueName: \"kubernetes.io/projected/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-kube-api-access-bhfpn\") on node \"crc\" DevicePath \"\"" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.466368 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.466388 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3af107fc-1ddc-4c90-80c1-c3fed25bddcb-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.848028 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" event={"ID":"3af107fc-1ddc-4c90-80c1-c3fed25bddcb","Type":"ContainerDied","Data":"b49d06441df7ed5da09c3a6c09adaef20f2e2e2461c196b7a8a5b215ce75805a"} Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.848079 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b49d06441df7ed5da09c3a6c09adaef20f2e2e2461c196b7a8a5b215ce75805a" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.848092 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-dcvr6" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.937501 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm"] Oct 13 13:28:08 crc kubenswrapper[4684]: E1013 13:28:08.937946 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3af107fc-1ddc-4c90-80c1-c3fed25bddcb" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.937981 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="3af107fc-1ddc-4c90-80c1-c3fed25bddcb" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.938266 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="3af107fc-1ddc-4c90-80c1-c3fed25bddcb" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.938971 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.941063 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.942842 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.942856 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.942939 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:28:08 crc kubenswrapper[4684]: I1013 13:28:08.956588 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm"] Oct 13 13:28:09 crc kubenswrapper[4684]: I1013 13:28:09.076755 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm\" (UID: \"4bf002ba-ce9a-40ad-a860-0572fc61d996\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" Oct 13 13:28:09 crc kubenswrapper[4684]: I1013 13:28:09.077207 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm\" (UID: \"4bf002ba-ce9a-40ad-a860-0572fc61d996\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" Oct 13 13:28:09 crc kubenswrapper[4684]: I1013 13:28:09.077255 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7brn5\" (UniqueName: \"kubernetes.io/projected/4bf002ba-ce9a-40ad-a860-0572fc61d996-kube-api-access-7brn5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm\" (UID: \"4bf002ba-ce9a-40ad-a860-0572fc61d996\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" Oct 13 13:28:09 crc kubenswrapper[4684]: I1013 13:28:09.077289 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm\" (UID: \"4bf002ba-ce9a-40ad-a860-0572fc61d996\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" Oct 13 13:28:09 crc kubenswrapper[4684]: I1013 13:28:09.178943 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm\" (UID: \"4bf002ba-ce9a-40ad-a860-0572fc61d996\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" Oct 13 13:28:09 crc kubenswrapper[4684]: I1013 13:28:09.179116 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm\" (UID: 
\"4bf002ba-ce9a-40ad-a860-0572fc61d996\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" Oct 13 13:28:09 crc kubenswrapper[4684]: I1013 13:28:09.179183 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm\" (UID: \"4bf002ba-ce9a-40ad-a860-0572fc61d996\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" Oct 13 13:28:09 crc kubenswrapper[4684]: I1013 13:28:09.179228 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7brn5\" (UniqueName: \"kubernetes.io/projected/4bf002ba-ce9a-40ad-a860-0572fc61d996-kube-api-access-7brn5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm\" (UID: \"4bf002ba-ce9a-40ad-a860-0572fc61d996\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" Oct 13 13:28:09 crc kubenswrapper[4684]: I1013 13:28:09.189224 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm\" (UID: \"4bf002ba-ce9a-40ad-a860-0572fc61d996\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" Oct 13 13:28:09 crc kubenswrapper[4684]: I1013 13:28:09.191413 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm\" (UID: \"4bf002ba-ce9a-40ad-a860-0572fc61d996\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" Oct 13 13:28:09 crc kubenswrapper[4684]: I1013 13:28:09.191616 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm\" (UID: \"4bf002ba-ce9a-40ad-a860-0572fc61d996\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" Oct 13 13:28:09 crc kubenswrapper[4684]: I1013 13:28:09.199876 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7brn5\" (UniqueName: \"kubernetes.io/projected/4bf002ba-ce9a-40ad-a860-0572fc61d996-kube-api-access-7brn5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm\" (UID: \"4bf002ba-ce9a-40ad-a860-0572fc61d996\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" Oct 13 13:28:09 crc kubenswrapper[4684]: I1013 13:28:09.256243 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" Oct 13 13:28:09 crc kubenswrapper[4684]: I1013 13:28:09.815417 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm"] Oct 13 13:28:09 crc kubenswrapper[4684]: I1013 13:28:09.858615 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" event={"ID":"4bf002ba-ce9a-40ad-a860-0572fc61d996","Type":"ContainerStarted","Data":"62c1ad5723a9d4592d03fbb7642bff42dd53eb98fcea02d179bffb576016dbbc"} Oct 13 13:28:10 crc kubenswrapper[4684]: I1013 13:28:10.888624 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" event={"ID":"4bf002ba-ce9a-40ad-a860-0572fc61d996","Type":"ContainerStarted","Data":"ab27e5f802cfc4afd92a2e3fea3936b8603ec3d61307de2ce3eede4ab6d97660"} Oct 13 13:28:10 crc kubenswrapper[4684]: I1013 13:28:10.918535 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" podStartSLOduration=2.463190971 podStartE2EDuration="2.918502775s" podCreationTimestamp="2025-10-13 13:28:08 +0000 UTC" firstStartedPulling="2025-10-13 13:28:09.817337704 +0000 UTC m=+1244.384721794" lastFinishedPulling="2025-10-13 13:28:10.272649528 +0000 UTC m=+1244.840033598" observedRunningTime="2025-10-13 13:28:10.901378937 +0000 UTC m=+1245.468763087" watchObservedRunningTime="2025-10-13 13:28:10.918502775 +0000 UTC m=+1245.485886885" Oct 13 13:28:30 crc kubenswrapper[4684]: I1013 13:28:30.560162 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:28:30 crc kubenswrapper[4684]: I1013 13:28:30.560723 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:28:48 crc kubenswrapper[4684]: I1013 13:28:48.034685 4684 scope.go:117] "RemoveContainer" containerID="1522ca99fe7e67706cece972b62f359904794a816f17d6b7cbaa22929a8b5b02" Oct 13 13:28:48 crc kubenswrapper[4684]: I1013 13:28:48.068976 4684 scope.go:117] "RemoveContainer" containerID="cb7301f5a63ee4c2a3467ca0ea6f1dc776d1930893b4b15caa401f819af78d53" Oct 13 13:29:00 crc kubenswrapper[4684]: I1013 13:29:00.559581 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:29:00 crc kubenswrapper[4684]: I1013 13:29:00.560155 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:29:30 crc kubenswrapper[4684]: I1013 13:29:30.559774 4684 patch_prober.go:28] 
Oct 13 13:29:30 crc kubenswrapper[4684]: I1013 13:29:30.561088 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 13:29:30 crc kubenswrapper[4684]: I1013 13:29:30.561159 4684 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wns5s"
Oct 13 13:29:30 crc kubenswrapper[4684]: I1013 13:29:30.561968 4684 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d2e805361345d8e27af996006522c59e8ccaab786312d030265294583995b587"} pod="openshift-machine-config-operator/machine-config-daemon-wns5s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 13 13:29:30 crc kubenswrapper[4684]: I1013 13:29:30.562034 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" containerID="cri-o://d2e805361345d8e27af996006522c59e8ccaab786312d030265294583995b587" gracePeriod=600
Oct 13 13:29:30 crc kubenswrapper[4684]: I1013 13:29:30.717713 4684 generic.go:334] "Generic (PLEG): container finished" podID="e54ad64a-6df7-4082-afde-d56463121b3f" containerID="d2e805361345d8e27af996006522c59e8ccaab786312d030265294583995b587" exitCode=0
Oct 13 13:29:30 crc kubenswrapper[4684]: I1013 13:29:30.717786 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerDied","Data":"d2e805361345d8e27af996006522c59e8ccaab786312d030265294583995b587"}
Oct 13 13:29:30 crc kubenswrapper[4684]: I1013 13:29:30.717978 4684 scope.go:117] "RemoveContainer" containerID="763ba38e9ba892e0c9f5a9e9f4dafff85d37a0067ef41eb06df8ee48a015f12a"
Oct 13 13:29:31 crc kubenswrapper[4684]: I1013 13:29:31.731026 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150"}
Oct 13 13:29:48 crc kubenswrapper[4684]: I1013 13:29:48.216671 4684 scope.go:117] "RemoveContainer" containerID="35b47892ffc3d492ff650a93f630bb8ea010af74ccef7ed53f3a1aef8dcaf08f"
Oct 13 13:30:00 crc kubenswrapper[4684]: I1013 13:30:00.164249 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"]
Oct 13 13:30:00 crc kubenswrapper[4684]: I1013 13:30:00.168518 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"
Oct 13 13:30:00 crc kubenswrapper[4684]: I1013 13:30:00.171364 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 13 13:30:00 crc kubenswrapper[4684]: I1013 13:30:00.172962 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 13 13:30:00 crc kubenswrapper[4684]: I1013 13:30:00.178079 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"]
Oct 13 13:30:00 crc kubenswrapper[4684]: I1013 13:30:00.234098 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-th8r5\" (UniqueName: \"kubernetes.io/projected/ab1ae631-3915-4339-b5ac-3b190dac48b7-kube-api-access-th8r5\") pod \"collect-profiles-29339370-5pbhn\" (UID: \"ab1ae631-3915-4339-b5ac-3b190dac48b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"
Oct 13 13:30:00 crc kubenswrapper[4684]: I1013 13:30:00.234413 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ab1ae631-3915-4339-b5ac-3b190dac48b7-config-volume\") pod \"collect-profiles-29339370-5pbhn\" (UID: \"ab1ae631-3915-4339-b5ac-3b190dac48b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"
Oct 13 13:30:00 crc kubenswrapper[4684]: I1013 13:30:00.234587 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ab1ae631-3915-4339-b5ac-3b190dac48b7-secret-volume\") pod \"collect-profiles-29339370-5pbhn\" (UID: \"ab1ae631-3915-4339-b5ac-3b190dac48b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"
Oct 13 13:30:00 crc kubenswrapper[4684]: I1013 13:30:00.336054 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-th8r5\" (UniqueName: \"kubernetes.io/projected/ab1ae631-3915-4339-b5ac-3b190dac48b7-kube-api-access-th8r5\") pod \"collect-profiles-29339370-5pbhn\" (UID: \"ab1ae631-3915-4339-b5ac-3b190dac48b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"
Oct 13 13:30:00 crc kubenswrapper[4684]: I1013 13:30:00.336178 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ab1ae631-3915-4339-b5ac-3b190dac48b7-config-volume\") pod \"collect-profiles-29339370-5pbhn\" (UID: \"ab1ae631-3915-4339-b5ac-3b190dac48b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"
Oct 13 13:30:00 crc kubenswrapper[4684]: I1013 13:30:00.336225 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ab1ae631-3915-4339-b5ac-3b190dac48b7-secret-volume\") pod \"collect-profiles-29339370-5pbhn\" (UID: \"ab1ae631-3915-4339-b5ac-3b190dac48b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"
Oct 13 13:30:00 crc kubenswrapper[4684]: I1013 13:30:00.337326 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ab1ae631-3915-4339-b5ac-3b190dac48b7-config-volume\") pod \"collect-profiles-29339370-5pbhn\" (UID: \"ab1ae631-3915-4339-b5ac-3b190dac48b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"
Oct 13 13:30:00 crc kubenswrapper[4684]: I1013 13:30:00.344084 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ab1ae631-3915-4339-b5ac-3b190dac48b7-secret-volume\") pod \"collect-profiles-29339370-5pbhn\" (UID: \"ab1ae631-3915-4339-b5ac-3b190dac48b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"
Oct 13 13:30:00 crc kubenswrapper[4684]: I1013 13:30:00.364067 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-th8r5\" (UniqueName: \"kubernetes.io/projected/ab1ae631-3915-4339-b5ac-3b190dac48b7-kube-api-access-th8r5\") pod \"collect-profiles-29339370-5pbhn\" (UID: \"ab1ae631-3915-4339-b5ac-3b190dac48b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"
Oct 13 13:30:00 crc kubenswrapper[4684]: I1013 13:30:00.499431 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"
Oct 13 13:30:00 crc kubenswrapper[4684]: I1013 13:30:00.944529 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"]
Oct 13 13:30:01 crc kubenswrapper[4684]: I1013 13:30:01.021770 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn" event={"ID":"ab1ae631-3915-4339-b5ac-3b190dac48b7","Type":"ContainerStarted","Data":"0d0488cbfbfea278052cc7bfc66c5be720b7e65babf5c371f4142c9693e8a949"}
Oct 13 13:30:02 crc kubenswrapper[4684]: I1013 13:30:02.032095 4684 generic.go:334] "Generic (PLEG): container finished" podID="ab1ae631-3915-4339-b5ac-3b190dac48b7" containerID="5b3e05c4bd3e83acc3ebf12f73607f75b618c37974eb6c20c538d358f1ba0385" exitCode=0
Oct 13 13:30:02 crc kubenswrapper[4684]: I1013 13:30:02.032204 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn" event={"ID":"ab1ae631-3915-4339-b5ac-3b190dac48b7","Type":"ContainerDied","Data":"5b3e05c4bd3e83acc3ebf12f73607f75b618c37974eb6c20c538d358f1ba0385"}
Oct 13 13:30:03 crc kubenswrapper[4684]: I1013 13:30:03.403051 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"
Oct 13 13:30:03 crc kubenswrapper[4684]: I1013 13:30:03.593394 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ab1ae631-3915-4339-b5ac-3b190dac48b7-config-volume\") pod \"ab1ae631-3915-4339-b5ac-3b190dac48b7\" (UID: \"ab1ae631-3915-4339-b5ac-3b190dac48b7\") "
Oct 13 13:30:03 crc kubenswrapper[4684]: I1013 13:30:03.594454 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-th8r5\" (UniqueName: \"kubernetes.io/projected/ab1ae631-3915-4339-b5ac-3b190dac48b7-kube-api-access-th8r5\") pod \"ab1ae631-3915-4339-b5ac-3b190dac48b7\" (UID: \"ab1ae631-3915-4339-b5ac-3b190dac48b7\") "
Oct 13 13:30:03 crc kubenswrapper[4684]: I1013 13:30:03.594385 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab1ae631-3915-4339-b5ac-3b190dac48b7-config-volume" (OuterVolumeSpecName: "config-volume") pod "ab1ae631-3915-4339-b5ac-3b190dac48b7" (UID: "ab1ae631-3915-4339-b5ac-3b190dac48b7"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 13:30:03 crc kubenswrapper[4684]: I1013 13:30:03.595339 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ab1ae631-3915-4339-b5ac-3b190dac48b7-secret-volume\") pod \"ab1ae631-3915-4339-b5ac-3b190dac48b7\" (UID: \"ab1ae631-3915-4339-b5ac-3b190dac48b7\") "
Oct 13 13:30:03 crc kubenswrapper[4684]: I1013 13:30:03.595924 4684 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ab1ae631-3915-4339-b5ac-3b190dac48b7-config-volume\") on node \"crc\" DevicePath \"\""
Oct 13 13:30:03 crc kubenswrapper[4684]: I1013 13:30:03.599671 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab1ae631-3915-4339-b5ac-3b190dac48b7-kube-api-access-th8r5" (OuterVolumeSpecName: "kube-api-access-th8r5") pod "ab1ae631-3915-4339-b5ac-3b190dac48b7" (UID: "ab1ae631-3915-4339-b5ac-3b190dac48b7"). InnerVolumeSpecName "kube-api-access-th8r5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:30:03 crc kubenswrapper[4684]: I1013 13:30:03.605643 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab1ae631-3915-4339-b5ac-3b190dac48b7-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ab1ae631-3915-4339-b5ac-3b190dac48b7" (UID: "ab1ae631-3915-4339-b5ac-3b190dac48b7"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:30:03 crc kubenswrapper[4684]: I1013 13:30:03.698050 4684 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ab1ae631-3915-4339-b5ac-3b190dac48b7-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 13 13:30:03 crc kubenswrapper[4684]: I1013 13:30:03.698091 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-th8r5\" (UniqueName: \"kubernetes.io/projected/ab1ae631-3915-4339-b5ac-3b190dac48b7-kube-api-access-th8r5\") on node \"crc\" DevicePath \"\"" Oct 13 13:30:04 crc kubenswrapper[4684]: I1013 13:30:04.055818 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn" event={"ID":"ab1ae631-3915-4339-b5ac-3b190dac48b7","Type":"ContainerDied","Data":"0d0488cbfbfea278052cc7bfc66c5be720b7e65babf5c371f4142c9693e8a949"} Oct 13 13:30:04 crc kubenswrapper[4684]: I1013 13:30:04.056162 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d0488cbfbfea278052cc7bfc66c5be720b7e65babf5c371f4142c9693e8a949" Oct 13 13:30:04 crc kubenswrapper[4684]: I1013 13:30:04.056072 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn" Oct 13 13:31:13 crc kubenswrapper[4684]: I1013 13:31:13.685017 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5frzn"] Oct 13 13:31:13 crc kubenswrapper[4684]: E1013 13:31:13.686033 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab1ae631-3915-4339-b5ac-3b190dac48b7" containerName="collect-profiles" Oct 13 13:31:13 crc kubenswrapper[4684]: I1013 13:31:13.686049 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab1ae631-3915-4339-b5ac-3b190dac48b7" containerName="collect-profiles" Oct 13 13:31:13 crc kubenswrapper[4684]: I1013 13:31:13.686266 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab1ae631-3915-4339-b5ac-3b190dac48b7" containerName="collect-profiles" Oct 13 13:31:13 crc kubenswrapper[4684]: I1013 13:31:13.687769 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:13 crc kubenswrapper[4684]: I1013 13:31:13.745349 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blj92\" (UniqueName: \"kubernetes.io/projected/268318c6-efb7-4c2c-b739-f60b6cf567a4-kube-api-access-blj92\") pod \"certified-operators-5frzn\" (UID: \"268318c6-efb7-4c2c-b739-f60b6cf567a4\") " pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:13 crc kubenswrapper[4684]: I1013 13:31:13.745404 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/268318c6-efb7-4c2c-b739-f60b6cf567a4-catalog-content\") pod \"certified-operators-5frzn\" (UID: \"268318c6-efb7-4c2c-b739-f60b6cf567a4\") " pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:13 crc kubenswrapper[4684]: I1013 13:31:13.745754 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/268318c6-efb7-4c2c-b739-f60b6cf567a4-utilities\") pod \"certified-operators-5frzn\" (UID: \"268318c6-efb7-4c2c-b739-f60b6cf567a4\") " pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:13 crc kubenswrapper[4684]: I1013 13:31:13.746553 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5frzn"] Oct 13 13:31:13 crc kubenswrapper[4684]: I1013 13:31:13.847999 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/268318c6-efb7-4c2c-b739-f60b6cf567a4-utilities\") pod \"certified-operators-5frzn\" (UID: \"268318c6-efb7-4c2c-b739-f60b6cf567a4\") " pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:13 crc kubenswrapper[4684]: I1013 13:31:13.848140 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blj92\" (UniqueName: \"kubernetes.io/projected/268318c6-efb7-4c2c-b739-f60b6cf567a4-kube-api-access-blj92\") pod \"certified-operators-5frzn\" (UID: \"268318c6-efb7-4c2c-b739-f60b6cf567a4\") " pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:13 crc kubenswrapper[4684]: I1013 13:31:13.848167 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/268318c6-efb7-4c2c-b739-f60b6cf567a4-catalog-content\") pod \"certified-operators-5frzn\" (UID: \"268318c6-efb7-4c2c-b739-f60b6cf567a4\") " pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:13 crc kubenswrapper[4684]: I1013 13:31:13.848433 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/268318c6-efb7-4c2c-b739-f60b6cf567a4-utilities\") pod \"certified-operators-5frzn\" (UID: \"268318c6-efb7-4c2c-b739-f60b6cf567a4\") " pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:13 crc kubenswrapper[4684]: I1013 13:31:13.848512 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/268318c6-efb7-4c2c-b739-f60b6cf567a4-catalog-content\") pod \"certified-operators-5frzn\" (UID: \"268318c6-efb7-4c2c-b739-f60b6cf567a4\") " pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:13 crc kubenswrapper[4684]: I1013 13:31:13.879170 4684 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-blj92\" (UniqueName: \"kubernetes.io/projected/268318c6-efb7-4c2c-b739-f60b6cf567a4-kube-api-access-blj92\") pod \"certified-operators-5frzn\" (UID: \"268318c6-efb7-4c2c-b739-f60b6cf567a4\") " pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:14 crc kubenswrapper[4684]: I1013 13:31:14.090330 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:14 crc kubenswrapper[4684]: I1013 13:31:14.637185 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5frzn"] Oct 13 13:31:14 crc kubenswrapper[4684]: I1013 13:31:14.824681 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5frzn" event={"ID":"268318c6-efb7-4c2c-b739-f60b6cf567a4","Type":"ContainerStarted","Data":"f77041a5b1d51a345e490f0246d9ff6e7b3cbe639d3c1e36ff3b636019840e06"} Oct 13 13:31:15 crc kubenswrapper[4684]: I1013 13:31:15.833933 4684 generic.go:334] "Generic (PLEG): container finished" podID="268318c6-efb7-4c2c-b739-f60b6cf567a4" containerID="3b02c5461340a64ba9edacd4766388eccc0db8149798ab84247e51791ba8dc9a" exitCode=0 Oct 13 13:31:15 crc kubenswrapper[4684]: I1013 13:31:15.833972 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5frzn" event={"ID":"268318c6-efb7-4c2c-b739-f60b6cf567a4","Type":"ContainerDied","Data":"3b02c5461340a64ba9edacd4766388eccc0db8149798ab84247e51791ba8dc9a"} Oct 13 13:31:18 crc kubenswrapper[4684]: I1013 13:31:18.861756 4684 generic.go:334] "Generic (PLEG): container finished" podID="268318c6-efb7-4c2c-b739-f60b6cf567a4" containerID="3066cbff7a918c30b0952a83aa8ef9853e96b4bed26f526c9c84c116af1ca5b8" exitCode=0 Oct 13 13:31:18 crc kubenswrapper[4684]: I1013 13:31:18.861805 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5frzn" event={"ID":"268318c6-efb7-4c2c-b739-f60b6cf567a4","Type":"ContainerDied","Data":"3066cbff7a918c30b0952a83aa8ef9853e96b4bed26f526c9c84c116af1ca5b8"} Oct 13 13:31:19 crc kubenswrapper[4684]: I1013 13:31:19.876699 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5frzn" event={"ID":"268318c6-efb7-4c2c-b739-f60b6cf567a4","Type":"ContainerStarted","Data":"02b0ae5cd0f837ce867ad222d01e0dbc0c876169db70c44f08ecde25ea14863f"} Oct 13 13:31:24 crc kubenswrapper[4684]: I1013 13:31:24.090852 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:24 crc kubenswrapper[4684]: I1013 13:31:24.091224 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:24 crc kubenswrapper[4684]: I1013 13:31:24.137648 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:24 crc kubenswrapper[4684]: I1013 13:31:24.166352 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5frzn" podStartSLOduration=7.685278052 podStartE2EDuration="11.166337096s" podCreationTimestamp="2025-10-13 13:31:13 +0000 UTC" firstStartedPulling="2025-10-13 13:31:15.836007168 +0000 UTC m=+1430.403391238" lastFinishedPulling="2025-10-13 13:31:19.317066172 +0000 UTC m=+1433.884450282" observedRunningTime="2025-10-13 
13:31:19.900547076 +0000 UTC m=+1434.467931146" watchObservedRunningTime="2025-10-13 13:31:24.166337096 +0000 UTC m=+1438.733721166" Oct 13 13:31:24 crc kubenswrapper[4684]: I1013 13:31:24.928303 4684 generic.go:334] "Generic (PLEG): container finished" podID="4bf002ba-ce9a-40ad-a860-0572fc61d996" containerID="ab27e5f802cfc4afd92a2e3fea3936b8603ec3d61307de2ce3eede4ab6d97660" exitCode=0 Oct 13 13:31:24 crc kubenswrapper[4684]: I1013 13:31:24.928364 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" event={"ID":"4bf002ba-ce9a-40ad-a860-0572fc61d996","Type":"ContainerDied","Data":"ab27e5f802cfc4afd92a2e3fea3936b8603ec3d61307de2ce3eede4ab6d97660"} Oct 13 13:31:25 crc kubenswrapper[4684]: I1013 13:31:25.002837 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:25 crc kubenswrapper[4684]: I1013 13:31:25.052217 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5frzn"] Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.383702 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.483947 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-inventory\") pod \"4bf002ba-ce9a-40ad-a860-0572fc61d996\" (UID: \"4bf002ba-ce9a-40ad-a860-0572fc61d996\") " Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.484009 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-ssh-key\") pod \"4bf002ba-ce9a-40ad-a860-0572fc61d996\" (UID: \"4bf002ba-ce9a-40ad-a860-0572fc61d996\") " Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.484082 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7brn5\" (UniqueName: \"kubernetes.io/projected/4bf002ba-ce9a-40ad-a860-0572fc61d996-kube-api-access-7brn5\") pod \"4bf002ba-ce9a-40ad-a860-0572fc61d996\" (UID: \"4bf002ba-ce9a-40ad-a860-0572fc61d996\") " Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.484186 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-bootstrap-combined-ca-bundle\") pod \"4bf002ba-ce9a-40ad-a860-0572fc61d996\" (UID: \"4bf002ba-ce9a-40ad-a860-0572fc61d996\") " Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.490079 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "4bf002ba-ce9a-40ad-a860-0572fc61d996" (UID: "4bf002ba-ce9a-40ad-a860-0572fc61d996"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.490217 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bf002ba-ce9a-40ad-a860-0572fc61d996-kube-api-access-7brn5" (OuterVolumeSpecName: "kube-api-access-7brn5") pod "4bf002ba-ce9a-40ad-a860-0572fc61d996" (UID: "4bf002ba-ce9a-40ad-a860-0572fc61d996"). InnerVolumeSpecName "kube-api-access-7brn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.514125 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-inventory" (OuterVolumeSpecName: "inventory") pod "4bf002ba-ce9a-40ad-a860-0572fc61d996" (UID: "4bf002ba-ce9a-40ad-a860-0572fc61d996"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.522694 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4bf002ba-ce9a-40ad-a860-0572fc61d996" (UID: "4bf002ba-ce9a-40ad-a860-0572fc61d996"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.586286 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.586323 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.586335 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7brn5\" (UniqueName: \"kubernetes.io/projected/4bf002ba-ce9a-40ad-a860-0572fc61d996-kube-api-access-7brn5\") on node \"crc\" DevicePath \"\"" Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.586349 4684 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bf002ba-ce9a-40ad-a860-0572fc61d996-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.945036 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" event={"ID":"4bf002ba-ce9a-40ad-a860-0572fc61d996","Type":"ContainerDied","Data":"62c1ad5723a9d4592d03fbb7642bff42dd53eb98fcea02d179bffb576016dbbc"} Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.945078 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="62c1ad5723a9d4592d03fbb7642bff42dd53eb98fcea02d179bffb576016dbbc" Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.945151 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5frzn" podUID="268318c6-efb7-4c2c-b739-f60b6cf567a4" containerName="registry-server" containerID="cri-o://02b0ae5cd0f837ce867ad222d01e0dbc0c876169db70c44f08ecde25ea14863f" gracePeriod=2 Oct 13 13:31:26 crc kubenswrapper[4684]: I1013 13:31:26.945058 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.042249 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr"] Oct 13 13:31:27 crc kubenswrapper[4684]: E1013 13:31:27.042761 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bf002ba-ce9a-40ad-a860-0572fc61d996" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.042787 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bf002ba-ce9a-40ad-a860-0572fc61d996" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.043010 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bf002ba-ce9a-40ad-a860-0572fc61d996" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.043685 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.045707 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.045731 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.049694 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.051062 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.054138 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr"] Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.195713 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6c328864-8f33-4897-8fa7-9f0feee4fbf9-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-28bmr\" (UID: \"6c328864-8f33-4897-8fa7-9f0feee4fbf9\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.196092 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8tsf\" (UniqueName: \"kubernetes.io/projected/6c328864-8f33-4897-8fa7-9f0feee4fbf9-kube-api-access-j8tsf\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-28bmr\" (UID: \"6c328864-8f33-4897-8fa7-9f0feee4fbf9\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.196130 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6c328864-8f33-4897-8fa7-9f0feee4fbf9-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-28bmr\" (UID: \"6c328864-8f33-4897-8fa7-9f0feee4fbf9\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.298375 4684 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8tsf\" (UniqueName: \"kubernetes.io/projected/6c328864-8f33-4897-8fa7-9f0feee4fbf9-kube-api-access-j8tsf\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-28bmr\" (UID: \"6c328864-8f33-4897-8fa7-9f0feee4fbf9\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.298506 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6c328864-8f33-4897-8fa7-9f0feee4fbf9-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-28bmr\" (UID: \"6c328864-8f33-4897-8fa7-9f0feee4fbf9\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.298607 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6c328864-8f33-4897-8fa7-9f0feee4fbf9-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-28bmr\" (UID: \"6c328864-8f33-4897-8fa7-9f0feee4fbf9\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.303811 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6c328864-8f33-4897-8fa7-9f0feee4fbf9-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-28bmr\" (UID: \"6c328864-8f33-4897-8fa7-9f0feee4fbf9\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.313817 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8tsf\" (UniqueName: \"kubernetes.io/projected/6c328864-8f33-4897-8fa7-9f0feee4fbf9-kube-api-access-j8tsf\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-28bmr\" (UID: \"6c328864-8f33-4897-8fa7-9f0feee4fbf9\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.316583 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6c328864-8f33-4897-8fa7-9f0feee4fbf9-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-28bmr\" (UID: \"6c328864-8f33-4897-8fa7-9f0feee4fbf9\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.405304 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.406514 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.501743 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/268318c6-efb7-4c2c-b739-f60b6cf567a4-catalog-content\") pod \"268318c6-efb7-4c2c-b739-f60b6cf567a4\" (UID: \"268318c6-efb7-4c2c-b739-f60b6cf567a4\") " Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.501886 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/268318c6-efb7-4c2c-b739-f60b6cf567a4-utilities\") pod \"268318c6-efb7-4c2c-b739-f60b6cf567a4\" (UID: \"268318c6-efb7-4c2c-b739-f60b6cf567a4\") " Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.502132 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blj92\" (UniqueName: \"kubernetes.io/projected/268318c6-efb7-4c2c-b739-f60b6cf567a4-kube-api-access-blj92\") pod \"268318c6-efb7-4c2c-b739-f60b6cf567a4\" (UID: \"268318c6-efb7-4c2c-b739-f60b6cf567a4\") " Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.504589 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/268318c6-efb7-4c2c-b739-f60b6cf567a4-utilities" (OuterVolumeSpecName: "utilities") pod "268318c6-efb7-4c2c-b739-f60b6cf567a4" (UID: "268318c6-efb7-4c2c-b739-f60b6cf567a4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.506529 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/268318c6-efb7-4c2c-b739-f60b6cf567a4-kube-api-access-blj92" (OuterVolumeSpecName: "kube-api-access-blj92") pod "268318c6-efb7-4c2c-b739-f60b6cf567a4" (UID: "268318c6-efb7-4c2c-b739-f60b6cf567a4"). InnerVolumeSpecName "kube-api-access-blj92". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.580358 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/268318c6-efb7-4c2c-b739-f60b6cf567a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "268318c6-efb7-4c2c-b739-f60b6cf567a4" (UID: "268318c6-efb7-4c2c-b739-f60b6cf567a4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.604180 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/268318c6-efb7-4c2c-b739-f60b6cf567a4-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.604224 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blj92\" (UniqueName: \"kubernetes.io/projected/268318c6-efb7-4c2c-b739-f60b6cf567a4-kube-api-access-blj92\") on node \"crc\" DevicePath \"\"" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.604238 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/268318c6-efb7-4c2c-b739-f60b6cf567a4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.956140 4684 generic.go:334] "Generic (PLEG): container finished" podID="268318c6-efb7-4c2c-b739-f60b6cf567a4" containerID="02b0ae5cd0f837ce867ad222d01e0dbc0c876169db70c44f08ecde25ea14863f" exitCode=0 Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.956190 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5frzn" event={"ID":"268318c6-efb7-4c2c-b739-f60b6cf567a4","Type":"ContainerDied","Data":"02b0ae5cd0f837ce867ad222d01e0dbc0c876169db70c44f08ecde25ea14863f"} Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.956220 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5frzn" event={"ID":"268318c6-efb7-4c2c-b739-f60b6cf567a4","Type":"ContainerDied","Data":"f77041a5b1d51a345e490f0246d9ff6e7b3cbe639d3c1e36ff3b636019840e06"} Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.956238 4684 scope.go:117] "RemoveContainer" containerID="02b0ae5cd0f837ce867ad222d01e0dbc0c876169db70c44f08ecde25ea14863f" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.956381 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5frzn" Oct 13 13:31:27 crc kubenswrapper[4684]: I1013 13:31:27.970427 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr"] Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.003283 4684 scope.go:117] "RemoveContainer" containerID="3066cbff7a918c30b0952a83aa8ef9853e96b4bed26f526c9c84c116af1ca5b8" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.003414 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5frzn"] Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.014462 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5frzn"] Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.029255 4684 scope.go:117] "RemoveContainer" containerID="3b02c5461340a64ba9edacd4766388eccc0db8149798ab84247e51791ba8dc9a" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.051412 4684 scope.go:117] "RemoveContainer" containerID="02b0ae5cd0f837ce867ad222d01e0dbc0c876169db70c44f08ecde25ea14863f" Oct 13 13:31:28 crc kubenswrapper[4684]: E1013 13:31:28.051916 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02b0ae5cd0f837ce867ad222d01e0dbc0c876169db70c44f08ecde25ea14863f\": container with ID starting with 02b0ae5cd0f837ce867ad222d01e0dbc0c876169db70c44f08ecde25ea14863f not found: ID does not exist" containerID="02b0ae5cd0f837ce867ad222d01e0dbc0c876169db70c44f08ecde25ea14863f" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.051946 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02b0ae5cd0f837ce867ad222d01e0dbc0c876169db70c44f08ecde25ea14863f"} err="failed to get container status \"02b0ae5cd0f837ce867ad222d01e0dbc0c876169db70c44f08ecde25ea14863f\": rpc error: code = NotFound desc = could not find container \"02b0ae5cd0f837ce867ad222d01e0dbc0c876169db70c44f08ecde25ea14863f\": container with ID starting with 02b0ae5cd0f837ce867ad222d01e0dbc0c876169db70c44f08ecde25ea14863f not found: ID does not exist" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.051970 4684 scope.go:117] "RemoveContainer" containerID="3066cbff7a918c30b0952a83aa8ef9853e96b4bed26f526c9c84c116af1ca5b8" Oct 13 13:31:28 crc kubenswrapper[4684]: E1013 13:31:28.052207 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3066cbff7a918c30b0952a83aa8ef9853e96b4bed26f526c9c84c116af1ca5b8\": container with ID starting with 3066cbff7a918c30b0952a83aa8ef9853e96b4bed26f526c9c84c116af1ca5b8 not found: ID does not exist" containerID="3066cbff7a918c30b0952a83aa8ef9853e96b4bed26f526c9c84c116af1ca5b8" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.052254 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3066cbff7a918c30b0952a83aa8ef9853e96b4bed26f526c9c84c116af1ca5b8"} err="failed to get container status \"3066cbff7a918c30b0952a83aa8ef9853e96b4bed26f526c9c84c116af1ca5b8\": rpc error: code = NotFound desc = could not find container \"3066cbff7a918c30b0952a83aa8ef9853e96b4bed26f526c9c84c116af1ca5b8\": container with ID starting with 3066cbff7a918c30b0952a83aa8ef9853e96b4bed26f526c9c84c116af1ca5b8 not found: ID does not exist" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.052270 4684 scope.go:117] "RemoveContainer" 
containerID="3b02c5461340a64ba9edacd4766388eccc0db8149798ab84247e51791ba8dc9a" Oct 13 13:31:28 crc kubenswrapper[4684]: E1013 13:31:28.052636 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b02c5461340a64ba9edacd4766388eccc0db8149798ab84247e51791ba8dc9a\": container with ID starting with 3b02c5461340a64ba9edacd4766388eccc0db8149798ab84247e51791ba8dc9a not found: ID does not exist" containerID="3b02c5461340a64ba9edacd4766388eccc0db8149798ab84247e51791ba8dc9a" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.052665 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b02c5461340a64ba9edacd4766388eccc0db8149798ab84247e51791ba8dc9a"} err="failed to get container status \"3b02c5461340a64ba9edacd4766388eccc0db8149798ab84247e51791ba8dc9a\": rpc error: code = NotFound desc = could not find container \"3b02c5461340a64ba9edacd4766388eccc0db8149798ab84247e51791ba8dc9a\": container with ID starting with 3b02c5461340a64ba9edacd4766388eccc0db8149798ab84247e51791ba8dc9a not found: ID does not exist" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.364709 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="268318c6-efb7-4c2c-b739-f60b6cf567a4" path="/var/lib/kubelet/pods/268318c6-efb7-4c2c-b739-f60b6cf567a4/volumes" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.785741 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rdjzl"] Oct 13 13:31:28 crc kubenswrapper[4684]: E1013 13:31:28.786889 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="268318c6-efb7-4c2c-b739-f60b6cf567a4" containerName="extract-utilities" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.786921 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="268318c6-efb7-4c2c-b739-f60b6cf567a4" containerName="extract-utilities" Oct 13 13:31:28 crc kubenswrapper[4684]: E1013 13:31:28.786931 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="268318c6-efb7-4c2c-b739-f60b6cf567a4" containerName="extract-content" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.786938 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="268318c6-efb7-4c2c-b739-f60b6cf567a4" containerName="extract-content" Oct 13 13:31:28 crc kubenswrapper[4684]: E1013 13:31:28.786962 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="268318c6-efb7-4c2c-b739-f60b6cf567a4" containerName="registry-server" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.786967 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="268318c6-efb7-4c2c-b739-f60b6cf567a4" containerName="registry-server" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.787174 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="268318c6-efb7-4c2c-b739-f60b6cf567a4" containerName="registry-server" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.788571 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.810338 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rdjzl"] Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.933456 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-utilities\") pod \"community-operators-rdjzl\" (UID: \"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2\") " pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.933542 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-catalog-content\") pod \"community-operators-rdjzl\" (UID: \"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2\") " pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.933748 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpnlz\" (UniqueName: \"kubernetes.io/projected/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-kube-api-access-cpnlz\") pod \"community-operators-rdjzl\" (UID: \"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2\") " pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.967733 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" event={"ID":"6c328864-8f33-4897-8fa7-9f0feee4fbf9","Type":"ContainerStarted","Data":"bea9365cd33a636f86511d9bafcd9b7eb2290efaf639765e2f35bc696f01fd0d"} Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.967774 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" event={"ID":"6c328864-8f33-4897-8fa7-9f0feee4fbf9","Type":"ContainerStarted","Data":"ac1fc9d5e78ee31388e46374b904b6a9cebfe4c96d31191cd72c5170386bfe6d"} Oct 13 13:31:28 crc kubenswrapper[4684]: I1013 13:31:28.988494 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" podStartSLOduration=1.506133835 podStartE2EDuration="1.988471696s" podCreationTimestamp="2025-10-13 13:31:27 +0000 UTC" firstStartedPulling="2025-10-13 13:31:28.003272044 +0000 UTC m=+1442.570656104" lastFinishedPulling="2025-10-13 13:31:28.485609905 +0000 UTC m=+1443.052993965" observedRunningTime="2025-10-13 13:31:28.985196906 +0000 UTC m=+1443.552580986" watchObservedRunningTime="2025-10-13 13:31:28.988471696 +0000 UTC m=+1443.555855776" Oct 13 13:31:29 crc kubenswrapper[4684]: I1013 13:31:29.035557 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-catalog-content\") pod \"community-operators-rdjzl\" (UID: \"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2\") " pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:29 crc kubenswrapper[4684]: I1013 13:31:29.036175 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-catalog-content\") pod \"community-operators-rdjzl\" (UID: 
\"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2\") " pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:29 crc kubenswrapper[4684]: I1013 13:31:29.036273 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpnlz\" (UniqueName: \"kubernetes.io/projected/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-kube-api-access-cpnlz\") pod \"community-operators-rdjzl\" (UID: \"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2\") " pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:29 crc kubenswrapper[4684]: I1013 13:31:29.036686 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-utilities\") pod \"community-operators-rdjzl\" (UID: \"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2\") " pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:29 crc kubenswrapper[4684]: I1013 13:31:29.036978 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-utilities\") pod \"community-operators-rdjzl\" (UID: \"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2\") " pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:29 crc kubenswrapper[4684]: I1013 13:31:29.055994 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpnlz\" (UniqueName: \"kubernetes.io/projected/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-kube-api-access-cpnlz\") pod \"community-operators-rdjzl\" (UID: \"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2\") " pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:29 crc kubenswrapper[4684]: I1013 13:31:29.113022 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:29 crc kubenswrapper[4684]: I1013 13:31:29.738019 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rdjzl"] Oct 13 13:31:29 crc kubenswrapper[4684]: I1013 13:31:29.977621 4684 generic.go:334] "Generic (PLEG): container finished" podID="f1e52dde-ced7-4436-bdd0-7d03da3c5ea2" containerID="dc18ca6f16388481915dbc995321de628057cfb8b54f5b615c3094f52d5655d4" exitCode=0 Oct 13 13:31:29 crc kubenswrapper[4684]: I1013 13:31:29.978755 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rdjzl" event={"ID":"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2","Type":"ContainerDied","Data":"dc18ca6f16388481915dbc995321de628057cfb8b54f5b615c3094f52d5655d4"} Oct 13 13:31:29 crc kubenswrapper[4684]: I1013 13:31:29.978781 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rdjzl" event={"ID":"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2","Type":"ContainerStarted","Data":"eadb6754406e774ba18187badf16f72bd0aa0fd7ee7082375cfb5a151f62f946"} Oct 13 13:31:30 crc kubenswrapper[4684]: I1013 13:31:30.560289 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:31:30 crc kubenswrapper[4684]: I1013 13:31:30.560809 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:31:31 crc kubenswrapper[4684]: I1013 13:31:31.997664 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rdjzl" event={"ID":"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2","Type":"ContainerStarted","Data":"503455a3e90f416839fb68af0592a9bd0a2031e75286022ab710dd611bf55f0e"} Oct 13 13:31:33 crc kubenswrapper[4684]: I1013 13:31:33.009595 4684 generic.go:334] "Generic (PLEG): container finished" podID="f1e52dde-ced7-4436-bdd0-7d03da3c5ea2" containerID="503455a3e90f416839fb68af0592a9bd0a2031e75286022ab710dd611bf55f0e" exitCode=0 Oct 13 13:31:33 crc kubenswrapper[4684]: I1013 13:31:33.009641 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rdjzl" event={"ID":"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2","Type":"ContainerDied","Data":"503455a3e90f416839fb68af0592a9bd0a2031e75286022ab710dd611bf55f0e"} Oct 13 13:31:34 crc kubenswrapper[4684]: I1013 13:31:34.026338 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rdjzl" event={"ID":"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2","Type":"ContainerStarted","Data":"2eb1c215dbf7cf1d9cfa72d523505334b931817511c054b0113dcfc64d18bba1"} Oct 13 13:31:34 crc kubenswrapper[4684]: I1013 13:31:34.050731 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rdjzl" podStartSLOduration=2.546131682 podStartE2EDuration="6.050709787s" podCreationTimestamp="2025-10-13 13:31:28 +0000 UTC" firstStartedPulling="2025-10-13 13:31:29.981188609 +0000 UTC m=+1444.548572679" lastFinishedPulling="2025-10-13 
13:31:33.485766724 +0000 UTC m=+1448.053150784" observedRunningTime="2025-10-13 13:31:34.04427761 +0000 UTC m=+1448.611661710" watchObservedRunningTime="2025-10-13 13:31:34.050709787 +0000 UTC m=+1448.618093867" Oct 13 13:31:39 crc kubenswrapper[4684]: I1013 13:31:39.113871 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:39 crc kubenswrapper[4684]: I1013 13:31:39.114529 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:39 crc kubenswrapper[4684]: I1013 13:31:39.162315 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:40 crc kubenswrapper[4684]: I1013 13:31:40.144492 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:40 crc kubenswrapper[4684]: I1013 13:31:40.215854 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rdjzl"] Oct 13 13:31:42 crc kubenswrapper[4684]: I1013 13:31:42.098405 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rdjzl" podUID="f1e52dde-ced7-4436-bdd0-7d03da3c5ea2" containerName="registry-server" containerID="cri-o://2eb1c215dbf7cf1d9cfa72d523505334b931817511c054b0113dcfc64d18bba1" gracePeriod=2 Oct 13 13:31:42 crc kubenswrapper[4684]: I1013 13:31:42.570293 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:42 crc kubenswrapper[4684]: I1013 13:31:42.762947 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-catalog-content\") pod \"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2\" (UID: \"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2\") " Oct 13 13:31:42 crc kubenswrapper[4684]: I1013 13:31:42.763013 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-utilities\") pod \"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2\" (UID: \"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2\") " Oct 13 13:31:42 crc kubenswrapper[4684]: I1013 13:31:42.763061 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cpnlz\" (UniqueName: \"kubernetes.io/projected/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-kube-api-access-cpnlz\") pod \"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2\" (UID: \"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2\") " Oct 13 13:31:42 crc kubenswrapper[4684]: I1013 13:31:42.764095 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-utilities" (OuterVolumeSpecName: "utilities") pod "f1e52dde-ced7-4436-bdd0-7d03da3c5ea2" (UID: "f1e52dde-ced7-4436-bdd0-7d03da3c5ea2"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:31:42 crc kubenswrapper[4684]: I1013 13:31:42.776946 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-kube-api-access-cpnlz" (OuterVolumeSpecName: "kube-api-access-cpnlz") pod "f1e52dde-ced7-4436-bdd0-7d03da3c5ea2" (UID: "f1e52dde-ced7-4436-bdd0-7d03da3c5ea2"). InnerVolumeSpecName "kube-api-access-cpnlz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:31:42 crc kubenswrapper[4684]: I1013 13:31:42.807840 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f1e52dde-ced7-4436-bdd0-7d03da3c5ea2" (UID: "f1e52dde-ced7-4436-bdd0-7d03da3c5ea2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:31:42 crc kubenswrapper[4684]: I1013 13:31:42.865978 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:31:42 crc kubenswrapper[4684]: I1013 13:31:42.866287 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:31:42 crc kubenswrapper[4684]: I1013 13:31:42.866300 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cpnlz\" (UniqueName: \"kubernetes.io/projected/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2-kube-api-access-cpnlz\") on node \"crc\" DevicePath \"\"" Oct 13 13:31:43 crc kubenswrapper[4684]: I1013 13:31:43.112120 4684 generic.go:334] "Generic (PLEG): container finished" podID="f1e52dde-ced7-4436-bdd0-7d03da3c5ea2" containerID="2eb1c215dbf7cf1d9cfa72d523505334b931817511c054b0113dcfc64d18bba1" exitCode=0 Oct 13 13:31:43 crc kubenswrapper[4684]: I1013 13:31:43.112170 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rdjzl" event={"ID":"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2","Type":"ContainerDied","Data":"2eb1c215dbf7cf1d9cfa72d523505334b931817511c054b0113dcfc64d18bba1"} Oct 13 13:31:43 crc kubenswrapper[4684]: I1013 13:31:43.112177 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rdjzl" Oct 13 13:31:43 crc kubenswrapper[4684]: I1013 13:31:43.112214 4684 scope.go:117] "RemoveContainer" containerID="2eb1c215dbf7cf1d9cfa72d523505334b931817511c054b0113dcfc64d18bba1" Oct 13 13:31:43 crc kubenswrapper[4684]: I1013 13:31:43.112201 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rdjzl" event={"ID":"f1e52dde-ced7-4436-bdd0-7d03da3c5ea2","Type":"ContainerDied","Data":"eadb6754406e774ba18187badf16f72bd0aa0fd7ee7082375cfb5a151f62f946"} Oct 13 13:31:43 crc kubenswrapper[4684]: I1013 13:31:43.143774 4684 scope.go:117] "RemoveContainer" containerID="503455a3e90f416839fb68af0592a9bd0a2031e75286022ab710dd611bf55f0e" Oct 13 13:31:43 crc kubenswrapper[4684]: I1013 13:31:43.162346 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rdjzl"] Oct 13 13:31:43 crc kubenswrapper[4684]: I1013 13:31:43.172605 4684 scope.go:117] "RemoveContainer" containerID="dc18ca6f16388481915dbc995321de628057cfb8b54f5b615c3094f52d5655d4" Oct 13 13:31:43 crc kubenswrapper[4684]: I1013 13:31:43.175954 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rdjzl"] Oct 13 13:31:43 crc kubenswrapper[4684]: I1013 13:31:43.228402 4684 scope.go:117] "RemoveContainer" containerID="2eb1c215dbf7cf1d9cfa72d523505334b931817511c054b0113dcfc64d18bba1" Oct 13 13:31:43 crc kubenswrapper[4684]: E1013 13:31:43.228941 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2eb1c215dbf7cf1d9cfa72d523505334b931817511c054b0113dcfc64d18bba1\": container with ID starting with 2eb1c215dbf7cf1d9cfa72d523505334b931817511c054b0113dcfc64d18bba1 not found: ID does not exist" containerID="2eb1c215dbf7cf1d9cfa72d523505334b931817511c054b0113dcfc64d18bba1" Oct 13 13:31:43 crc kubenswrapper[4684]: I1013 13:31:43.228974 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2eb1c215dbf7cf1d9cfa72d523505334b931817511c054b0113dcfc64d18bba1"} err="failed to get container status \"2eb1c215dbf7cf1d9cfa72d523505334b931817511c054b0113dcfc64d18bba1\": rpc error: code = NotFound desc = could not find container \"2eb1c215dbf7cf1d9cfa72d523505334b931817511c054b0113dcfc64d18bba1\": container with ID starting with 2eb1c215dbf7cf1d9cfa72d523505334b931817511c054b0113dcfc64d18bba1 not found: ID does not exist" Oct 13 13:31:43 crc kubenswrapper[4684]: I1013 13:31:43.228993 4684 scope.go:117] "RemoveContainer" containerID="503455a3e90f416839fb68af0592a9bd0a2031e75286022ab710dd611bf55f0e" Oct 13 13:31:43 crc kubenswrapper[4684]: E1013 13:31:43.229323 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"503455a3e90f416839fb68af0592a9bd0a2031e75286022ab710dd611bf55f0e\": container with ID starting with 503455a3e90f416839fb68af0592a9bd0a2031e75286022ab710dd611bf55f0e not found: ID does not exist" containerID="503455a3e90f416839fb68af0592a9bd0a2031e75286022ab710dd611bf55f0e" Oct 13 13:31:43 crc kubenswrapper[4684]: I1013 13:31:43.229346 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"503455a3e90f416839fb68af0592a9bd0a2031e75286022ab710dd611bf55f0e"} err="failed to get container status \"503455a3e90f416839fb68af0592a9bd0a2031e75286022ab710dd611bf55f0e\": rpc error: code = NotFound desc = could not find 
container \"503455a3e90f416839fb68af0592a9bd0a2031e75286022ab710dd611bf55f0e\": container with ID starting with 503455a3e90f416839fb68af0592a9bd0a2031e75286022ab710dd611bf55f0e not found: ID does not exist" Oct 13 13:31:43 crc kubenswrapper[4684]: I1013 13:31:43.229359 4684 scope.go:117] "RemoveContainer" containerID="dc18ca6f16388481915dbc995321de628057cfb8b54f5b615c3094f52d5655d4" Oct 13 13:31:43 crc kubenswrapper[4684]: E1013 13:31:43.229688 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc18ca6f16388481915dbc995321de628057cfb8b54f5b615c3094f52d5655d4\": container with ID starting with dc18ca6f16388481915dbc995321de628057cfb8b54f5b615c3094f52d5655d4 not found: ID does not exist" containerID="dc18ca6f16388481915dbc995321de628057cfb8b54f5b615c3094f52d5655d4" Oct 13 13:31:43 crc kubenswrapper[4684]: I1013 13:31:43.229712 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc18ca6f16388481915dbc995321de628057cfb8b54f5b615c3094f52d5655d4"} err="failed to get container status \"dc18ca6f16388481915dbc995321de628057cfb8b54f5b615c3094f52d5655d4\": rpc error: code = NotFound desc = could not find container \"dc18ca6f16388481915dbc995321de628057cfb8b54f5b615c3094f52d5655d4\": container with ID starting with dc18ca6f16388481915dbc995321de628057cfb8b54f5b615c3094f52d5655d4 not found: ID does not exist" Oct 13 13:31:43 crc kubenswrapper[4684]: E1013 13:31:43.288835 4684 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1e52dde_ced7_4436_bdd0_7d03da3c5ea2.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1e52dde_ced7_4436_bdd0_7d03da3c5ea2.slice/crio-eadb6754406e774ba18187badf16f72bd0aa0fd7ee7082375cfb5a151f62f946\": RecentStats: unable to find data in memory cache]" Oct 13 13:31:44 crc kubenswrapper[4684]: I1013 13:31:44.361728 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1e52dde-ced7-4436-bdd0-7d03da3c5ea2" path="/var/lib/kubelet/pods/f1e52dde-ced7-4436-bdd0-7d03da3c5ea2/volumes" Oct 13 13:32:00 crc kubenswrapper[4684]: I1013 13:32:00.559978 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:32:00 crc kubenswrapper[4684]: I1013 13:32:00.560543 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:32:10 crc kubenswrapper[4684]: I1013 13:32:10.065234 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-wzmnn"] Oct 13 13:32:10 crc kubenswrapper[4684]: I1013 13:32:10.084763 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-wzmnn"] Oct 13 13:32:10 crc kubenswrapper[4684]: I1013 13:32:10.410030 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e77f496-7c3a-4769-847c-216d887734f9" 
path="/var/lib/kubelet/pods/9e77f496-7c3a-4769-847c-216d887734f9/volumes" Oct 13 13:32:11 crc kubenswrapper[4684]: I1013 13:32:11.028460 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-9h7cm"] Oct 13 13:32:11 crc kubenswrapper[4684]: I1013 13:32:11.037477 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-9h7cm"] Oct 13 13:32:12 crc kubenswrapper[4684]: I1013 13:32:12.363823 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ca28019-9f27-416e-a635-d87fb09c55ae" path="/var/lib/kubelet/pods/7ca28019-9f27-416e-a635-d87fb09c55ae/volumes" Oct 13 13:32:16 crc kubenswrapper[4684]: I1013 13:32:16.048203 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-2lzdc"] Oct 13 13:32:16 crc kubenswrapper[4684]: I1013 13:32:16.057085 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-2lzdc"] Oct 13 13:32:16 crc kubenswrapper[4684]: I1013 13:32:16.381525 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfd212bc-f05d-4d46-97f4-a23ac84b4ca6" path="/var/lib/kubelet/pods/bfd212bc-f05d-4d46-97f4-a23ac84b4ca6/volumes" Oct 13 13:32:20 crc kubenswrapper[4684]: I1013 13:32:20.029705 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-08cf-account-create-4hrzk"] Oct 13 13:32:20 crc kubenswrapper[4684]: I1013 13:32:20.040078 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-3fce-account-create-rhspt"] Oct 13 13:32:20 crc kubenswrapper[4684]: I1013 13:32:20.049429 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-08cf-account-create-4hrzk"] Oct 13 13:32:20 crc kubenswrapper[4684]: I1013 13:32:20.056788 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-3fce-account-create-rhspt"] Oct 13 13:32:20 crc kubenswrapper[4684]: I1013 13:32:20.362966 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="847ba3c2-e9b1-4b08-a008-d984bf6fbf35" path="/var/lib/kubelet/pods/847ba3c2-e9b1-4b08-a008-d984bf6fbf35/volumes" Oct 13 13:32:20 crc kubenswrapper[4684]: I1013 13:32:20.364133 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a68a5492-b367-4376-9328-4721993efa19" path="/var/lib/kubelet/pods/a68a5492-b367-4376-9328-4721993efa19/volumes" Oct 13 13:32:26 crc kubenswrapper[4684]: I1013 13:32:26.026147 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-ffac-account-create-8lkzt"] Oct 13 13:32:26 crc kubenswrapper[4684]: I1013 13:32:26.034196 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-ffac-account-create-8lkzt"] Oct 13 13:32:26 crc kubenswrapper[4684]: I1013 13:32:26.363141 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9779ffc7-b9ee-41fe-9f44-72dc7e9439b6" path="/var/lib/kubelet/pods/9779ffc7-b9ee-41fe-9f44-72dc7e9439b6/volumes" Oct 13 13:32:30 crc kubenswrapper[4684]: I1013 13:32:30.560209 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:32:30 crc kubenswrapper[4684]: I1013 13:32:30.560708 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" 
podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:32:30 crc kubenswrapper[4684]: I1013 13:32:30.560752 4684 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:32:30 crc kubenswrapper[4684]: I1013 13:32:30.561461 4684 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150"} pod="openshift-machine-config-operator/machine-config-daemon-wns5s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 13:32:30 crc kubenswrapper[4684]: I1013 13:32:30.561512 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" containerID="cri-o://545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" gracePeriod=600 Oct 13 13:32:30 crc kubenswrapper[4684]: E1013 13:32:30.686518 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:32:31 crc kubenswrapper[4684]: I1013 13:32:31.667254 4684 generic.go:334] "Generic (PLEG): container finished" podID="e54ad64a-6df7-4082-afde-d56463121b3f" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" exitCode=0 Oct 13 13:32:31 crc kubenswrapper[4684]: I1013 13:32:31.667330 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerDied","Data":"545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150"} Oct 13 13:32:31 crc kubenswrapper[4684]: I1013 13:32:31.667588 4684 scope.go:117] "RemoveContainer" containerID="d2e805361345d8e27af996006522c59e8ccaab786312d030265294583995b587" Oct 13 13:32:31 crc kubenswrapper[4684]: I1013 13:32:31.668253 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:32:31 crc kubenswrapper[4684]: E1013 13:32:31.668532 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:32:38 crc kubenswrapper[4684]: I1013 13:32:38.031125 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-5llnp"] Oct 13 13:32:38 crc kubenswrapper[4684]: I1013 13:32:38.038663 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-tg589"] Oct 13 13:32:38 crc kubenswrapper[4684]: I1013 
13:32:38.048108 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-5llnp"] Oct 13 13:32:38 crc kubenswrapper[4684]: I1013 13:32:38.055279 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-tg589"] Oct 13 13:32:38 crc kubenswrapper[4684]: I1013 13:32:38.369820 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="269bbce5-67e7-4869-bda7-876636e2faa4" path="/var/lib/kubelet/pods/269bbce5-67e7-4869-bda7-876636e2faa4/volumes" Oct 13 13:32:38 crc kubenswrapper[4684]: I1013 13:32:38.371076 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd" path="/var/lib/kubelet/pods/7a076a1b-1e30-4dbe-b2fe-403bfb63c4dd/volumes" Oct 13 13:32:39 crc kubenswrapper[4684]: I1013 13:32:39.054685 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-mjjht"] Oct 13 13:32:39 crc kubenswrapper[4684]: I1013 13:32:39.066556 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-mjjht"] Oct 13 13:32:40 crc kubenswrapper[4684]: I1013 13:32:40.368156 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c2ba48c-f56e-4f6e-9992-d52a628c86fb" path="/var/lib/kubelet/pods/8c2ba48c-f56e-4f6e-9992-d52a628c86fb/volumes" Oct 13 13:32:45 crc kubenswrapper[4684]: I1013 13:32:45.351450 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:32:45 crc kubenswrapper[4684]: E1013 13:32:45.352308 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:32:48 crc kubenswrapper[4684]: I1013 13:32:48.361469 4684 scope.go:117] "RemoveContainer" containerID="b9738c706f605757802e626de1c6fbf4289aa110f41359da29ae662cf861c906" Oct 13 13:32:48 crc kubenswrapper[4684]: I1013 13:32:48.403057 4684 scope.go:117] "RemoveContainer" containerID="a9444f290b3db177eaa26302c17d6046b7b9cae9be686bb47ca1c3a3b11b7111" Oct 13 13:32:48 crc kubenswrapper[4684]: I1013 13:32:48.442033 4684 scope.go:117] "RemoveContainer" containerID="4c0b4003decb737fb839dd773ff181c99716d767ea36001bd6fb90544141fa89" Oct 13 13:32:48 crc kubenswrapper[4684]: I1013 13:32:48.506324 4684 scope.go:117] "RemoveContainer" containerID="9993c37055b4739e2176c7c266094f594d843aebcc56b3dc71b28360b7c2e2c6" Oct 13 13:32:48 crc kubenswrapper[4684]: I1013 13:32:48.545287 4684 scope.go:117] "RemoveContainer" containerID="f0bdc6a9ecc252c5b66b04515ef439030db7f082df3039f27a82813010002cb8" Oct 13 13:32:48 crc kubenswrapper[4684]: I1013 13:32:48.574780 4684 scope.go:117] "RemoveContainer" containerID="f00b3382948736f20cefdae7267497dd4ed2c0cf4a454c2b1429ed9b1ac3eebc" Oct 13 13:32:48 crc kubenswrapper[4684]: I1013 13:32:48.616112 4684 scope.go:117] "RemoveContainer" containerID="da439ed2cdba43553d44bdaa42cce2dd97a5477d9e04eca406a370276789357d" Oct 13 13:32:48 crc kubenswrapper[4684]: I1013 13:32:48.645825 4684 scope.go:117] "RemoveContainer" containerID="398fa3eb9c985fef6389eca9c50733e32393905d74dd4d4bd80254b36dfb97ae" Oct 13 13:32:48 crc kubenswrapper[4684]: I1013 13:32:48.670389 4684 scope.go:117] "RemoveContainer" 
containerID="2bc95d753cc3bdb79c2c7cffba6dd39270aa1eee92b20e2782ec25208cae055f" Oct 13 13:32:50 crc kubenswrapper[4684]: I1013 13:32:50.051993 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6f5e-account-create-8zhf2"] Oct 13 13:32:50 crc kubenswrapper[4684]: I1013 13:32:50.061736 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-6f5e-account-create-8zhf2"] Oct 13 13:32:50 crc kubenswrapper[4684]: I1013 13:32:50.363551 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09785215-874e-4d3f-b431-4d4df98e31c8" path="/var/lib/kubelet/pods/09785215-874e-4d3f-b431-4d4df98e31c8/volumes" Oct 13 13:32:51 crc kubenswrapper[4684]: I1013 13:32:51.037980 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-5b32-account-create-czfqk"] Oct 13 13:32:51 crc kubenswrapper[4684]: I1013 13:32:51.052292 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-5b32-account-create-czfqk"] Oct 13 13:32:51 crc kubenswrapper[4684]: I1013 13:32:51.060703 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-1da3-account-create-tczs9"] Oct 13 13:32:51 crc kubenswrapper[4684]: I1013 13:32:51.068736 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-1da3-account-create-tczs9"] Oct 13 13:32:52 crc kubenswrapper[4684]: I1013 13:32:52.367779 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41c878ea-21d2-4f4d-9628-d7bb8a46c1e0" path="/var/lib/kubelet/pods/41c878ea-21d2-4f4d-9628-d7bb8a46c1e0/volumes" Oct 13 13:32:52 crc kubenswrapper[4684]: I1013 13:32:52.371286 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="491293d3-586b-4691-91fd-e1c6e51144bb" path="/var/lib/kubelet/pods/491293d3-586b-4691-91fd-e1c6e51144bb/volumes" Oct 13 13:32:53 crc kubenswrapper[4684]: I1013 13:32:53.039138 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-4ddbr"] Oct 13 13:32:53 crc kubenswrapper[4684]: I1013 13:32:53.051743 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-4ddbr"] Oct 13 13:32:54 crc kubenswrapper[4684]: I1013 13:32:54.367883 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28fe7aea-16bf-46f3-b8b0-6b322c4ffecf" path="/var/lib/kubelet/pods/28fe7aea-16bf-46f3-b8b0-6b322c4ffecf/volumes" Oct 13 13:32:55 crc kubenswrapper[4684]: I1013 13:32:55.031526 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-hh8w7"] Oct 13 13:32:55 crc kubenswrapper[4684]: I1013 13:32:55.039754 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-hh8w7"] Oct 13 13:32:56 crc kubenswrapper[4684]: I1013 13:32:56.369024 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe" path="/var/lib/kubelet/pods/f2c2b0a0-2d1c-4b25-b223-a0ff24d807fe/volumes" Oct 13 13:32:58 crc kubenswrapper[4684]: I1013 13:32:58.354325 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:32:58 crc kubenswrapper[4684]: E1013 13:32:58.357709 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.611106 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-74m2v"] Oct 13 13:32:59 crc kubenswrapper[4684]: E1013 13:32:59.611790 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1e52dde-ced7-4436-bdd0-7d03da3c5ea2" containerName="registry-server" Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.611805 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1e52dde-ced7-4436-bdd0-7d03da3c5ea2" containerName="registry-server" Oct 13 13:32:59 crc kubenswrapper[4684]: E1013 13:32:59.611825 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1e52dde-ced7-4436-bdd0-7d03da3c5ea2" containerName="extract-utilities" Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.611832 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1e52dde-ced7-4436-bdd0-7d03da3c5ea2" containerName="extract-utilities" Oct 13 13:32:59 crc kubenswrapper[4684]: E1013 13:32:59.611840 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1e52dde-ced7-4436-bdd0-7d03da3c5ea2" containerName="extract-content" Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.611847 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1e52dde-ced7-4436-bdd0-7d03da3c5ea2" containerName="extract-content" Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.612085 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1e52dde-ced7-4436-bdd0-7d03da3c5ea2" containerName="registry-server" Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.613464 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.618890 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-74m2v"] Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.785263 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c045ada2-5b0b-47d5-941d-de7b20111580-catalog-content\") pod \"redhat-marketplace-74m2v\" (UID: \"c045ada2-5b0b-47d5-941d-de7b20111580\") " pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.785357 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vg7z\" (UniqueName: \"kubernetes.io/projected/c045ada2-5b0b-47d5-941d-de7b20111580-kube-api-access-9vg7z\") pod \"redhat-marketplace-74m2v\" (UID: \"c045ada2-5b0b-47d5-941d-de7b20111580\") " pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.785404 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c045ada2-5b0b-47d5-941d-de7b20111580-utilities\") pod \"redhat-marketplace-74m2v\" (UID: \"c045ada2-5b0b-47d5-941d-de7b20111580\") " pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.886947 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c045ada2-5b0b-47d5-941d-de7b20111580-utilities\") pod \"redhat-marketplace-74m2v\" (UID: \"c045ada2-5b0b-47d5-941d-de7b20111580\") " pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.887074 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c045ada2-5b0b-47d5-941d-de7b20111580-catalog-content\") pod \"redhat-marketplace-74m2v\" (UID: \"c045ada2-5b0b-47d5-941d-de7b20111580\") " pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.887142 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vg7z\" (UniqueName: \"kubernetes.io/projected/c045ada2-5b0b-47d5-941d-de7b20111580-kube-api-access-9vg7z\") pod \"redhat-marketplace-74m2v\" (UID: \"c045ada2-5b0b-47d5-941d-de7b20111580\") " pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.887727 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c045ada2-5b0b-47d5-941d-de7b20111580-utilities\") pod \"redhat-marketplace-74m2v\" (UID: \"c045ada2-5b0b-47d5-941d-de7b20111580\") " pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.887764 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c045ada2-5b0b-47d5-941d-de7b20111580-catalog-content\") pod \"redhat-marketplace-74m2v\" (UID: \"c045ada2-5b0b-47d5-941d-de7b20111580\") " pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.909121 4684 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-9vg7z\" (UniqueName: \"kubernetes.io/projected/c045ada2-5b0b-47d5-941d-de7b20111580-kube-api-access-9vg7z\") pod \"redhat-marketplace-74m2v\" (UID: \"c045ada2-5b0b-47d5-941d-de7b20111580\") " pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:32:59 crc kubenswrapper[4684]: I1013 13:32:59.935810 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:33:00 crc kubenswrapper[4684]: I1013 13:33:00.399628 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-74m2v"] Oct 13 13:33:00 crc kubenswrapper[4684]: I1013 13:33:00.980854 4684 generic.go:334] "Generic (PLEG): container finished" podID="c045ada2-5b0b-47d5-941d-de7b20111580" containerID="e039c9176610846f9a0bcbc1b8acf99a2f9c86cf2fceae90fa665e75157f0a8c" exitCode=0 Oct 13 13:33:00 crc kubenswrapper[4684]: I1013 13:33:00.981516 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74m2v" event={"ID":"c045ada2-5b0b-47d5-941d-de7b20111580","Type":"ContainerDied","Data":"e039c9176610846f9a0bcbc1b8acf99a2f9c86cf2fceae90fa665e75157f0a8c"} Oct 13 13:33:00 crc kubenswrapper[4684]: I1013 13:33:00.983198 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74m2v" event={"ID":"c045ada2-5b0b-47d5-941d-de7b20111580","Type":"ContainerStarted","Data":"324a1cf6b5f14e82e1b082161e72bdb418cc0c3a60a4b87e55eb12ba5b1190cf"} Oct 13 13:33:00 crc kubenswrapper[4684]: I1013 13:33:00.984497 4684 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 13 13:33:03 crc kubenswrapper[4684]: I1013 13:33:03.010533 4684 generic.go:334] "Generic (PLEG): container finished" podID="c045ada2-5b0b-47d5-941d-de7b20111580" containerID="9908e19a4d6bb99f4ffbc3af9d4c6cab1ee12ee0980a9cabdff55549c7aa1249" exitCode=0 Oct 13 13:33:03 crc kubenswrapper[4684]: I1013 13:33:03.010887 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74m2v" event={"ID":"c045ada2-5b0b-47d5-941d-de7b20111580","Type":"ContainerDied","Data":"9908e19a4d6bb99f4ffbc3af9d4c6cab1ee12ee0980a9cabdff55549c7aa1249"} Oct 13 13:33:04 crc kubenswrapper[4684]: I1013 13:33:04.025726 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74m2v" event={"ID":"c045ada2-5b0b-47d5-941d-de7b20111580","Type":"ContainerStarted","Data":"e183f0d4f99df3521df9e8b9882b4ec8f40481d438c0c11cbf8af16bd74b2127"} Oct 13 13:33:04 crc kubenswrapper[4684]: I1013 13:33:04.051252 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-74m2v" podStartSLOduration=2.441464349 podStartE2EDuration="5.051229617s" podCreationTimestamp="2025-10-13 13:32:59 +0000 UTC" firstStartedPulling="2025-10-13 13:33:00.98415523 +0000 UTC m=+1535.551539300" lastFinishedPulling="2025-10-13 13:33:03.593920478 +0000 UTC m=+1538.161304568" observedRunningTime="2025-10-13 13:33:04.047451101 +0000 UTC m=+1538.614835161" watchObservedRunningTime="2025-10-13 13:33:04.051229617 +0000 UTC m=+1538.618613697" Oct 13 13:33:09 crc kubenswrapper[4684]: I1013 13:33:09.936938 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:33:09 crc kubenswrapper[4684]: I1013 13:33:09.938922 4684 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:33:10 crc kubenswrapper[4684]: I1013 13:33:10.002742 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:33:10 crc kubenswrapper[4684]: I1013 13:33:10.133930 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:33:10 crc kubenswrapper[4684]: I1013 13:33:10.243252 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-74m2v"] Oct 13 13:33:11 crc kubenswrapper[4684]: I1013 13:33:11.350617 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:33:11 crc kubenswrapper[4684]: E1013 13:33:11.351202 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:33:12 crc kubenswrapper[4684]: I1013 13:33:12.105645 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-74m2v" podUID="c045ada2-5b0b-47d5-941d-de7b20111580" containerName="registry-server" containerID="cri-o://e183f0d4f99df3521df9e8b9882b4ec8f40481d438c0c11cbf8af16bd74b2127" gracePeriod=2 Oct 13 13:33:12 crc kubenswrapper[4684]: I1013 13:33:12.547309 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:33:12 crc kubenswrapper[4684]: I1013 13:33:12.675476 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c045ada2-5b0b-47d5-941d-de7b20111580-utilities\") pod \"c045ada2-5b0b-47d5-941d-de7b20111580\" (UID: \"c045ada2-5b0b-47d5-941d-de7b20111580\") " Oct 13 13:33:12 crc kubenswrapper[4684]: I1013 13:33:12.675661 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vg7z\" (UniqueName: \"kubernetes.io/projected/c045ada2-5b0b-47d5-941d-de7b20111580-kube-api-access-9vg7z\") pod \"c045ada2-5b0b-47d5-941d-de7b20111580\" (UID: \"c045ada2-5b0b-47d5-941d-de7b20111580\") " Oct 13 13:33:12 crc kubenswrapper[4684]: I1013 13:33:12.675685 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c045ada2-5b0b-47d5-941d-de7b20111580-catalog-content\") pod \"c045ada2-5b0b-47d5-941d-de7b20111580\" (UID: \"c045ada2-5b0b-47d5-941d-de7b20111580\") " Oct 13 13:33:12 crc kubenswrapper[4684]: I1013 13:33:12.676305 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c045ada2-5b0b-47d5-941d-de7b20111580-utilities" (OuterVolumeSpecName: "utilities") pod "c045ada2-5b0b-47d5-941d-de7b20111580" (UID: "c045ada2-5b0b-47d5-941d-de7b20111580"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:33:12 crc kubenswrapper[4684]: I1013 13:33:12.680814 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c045ada2-5b0b-47d5-941d-de7b20111580-kube-api-access-9vg7z" (OuterVolumeSpecName: "kube-api-access-9vg7z") pod "c045ada2-5b0b-47d5-941d-de7b20111580" (UID: "c045ada2-5b0b-47d5-941d-de7b20111580"). InnerVolumeSpecName "kube-api-access-9vg7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:33:12 crc kubenswrapper[4684]: I1013 13:33:12.691489 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c045ada2-5b0b-47d5-941d-de7b20111580-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c045ada2-5b0b-47d5-941d-de7b20111580" (UID: "c045ada2-5b0b-47d5-941d-de7b20111580"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:33:12 crc kubenswrapper[4684]: I1013 13:33:12.777260 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c045ada2-5b0b-47d5-941d-de7b20111580-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:33:12 crc kubenswrapper[4684]: I1013 13:33:12.777291 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vg7z\" (UniqueName: \"kubernetes.io/projected/c045ada2-5b0b-47d5-941d-de7b20111580-kube-api-access-9vg7z\") on node \"crc\" DevicePath \"\"" Oct 13 13:33:12 crc kubenswrapper[4684]: I1013 13:33:12.777302 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c045ada2-5b0b-47d5-941d-de7b20111580-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:33:13 crc kubenswrapper[4684]: I1013 13:33:13.120645 4684 generic.go:334] "Generic (PLEG): container finished" podID="c045ada2-5b0b-47d5-941d-de7b20111580" containerID="e183f0d4f99df3521df9e8b9882b4ec8f40481d438c0c11cbf8af16bd74b2127" exitCode=0 Oct 13 13:33:13 crc kubenswrapper[4684]: I1013 13:33:13.120703 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74m2v" event={"ID":"c045ada2-5b0b-47d5-941d-de7b20111580","Type":"ContainerDied","Data":"e183f0d4f99df3521df9e8b9882b4ec8f40481d438c0c11cbf8af16bd74b2127"} Oct 13 13:33:13 crc kubenswrapper[4684]: I1013 13:33:13.120734 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74m2v" event={"ID":"c045ada2-5b0b-47d5-941d-de7b20111580","Type":"ContainerDied","Data":"324a1cf6b5f14e82e1b082161e72bdb418cc0c3a60a4b87e55eb12ba5b1190cf"} Oct 13 13:33:13 crc kubenswrapper[4684]: I1013 13:33:13.120774 4684 scope.go:117] "RemoveContainer" containerID="e183f0d4f99df3521df9e8b9882b4ec8f40481d438c0c11cbf8af16bd74b2127" Oct 13 13:33:13 crc kubenswrapper[4684]: I1013 13:33:13.120943 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-74m2v" Oct 13 13:33:13 crc kubenswrapper[4684]: I1013 13:33:13.157714 4684 scope.go:117] "RemoveContainer" containerID="9908e19a4d6bb99f4ffbc3af9d4c6cab1ee12ee0980a9cabdff55549c7aa1249" Oct 13 13:33:13 crc kubenswrapper[4684]: I1013 13:33:13.175019 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-74m2v"] Oct 13 13:33:13 crc kubenswrapper[4684]: I1013 13:33:13.192564 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-74m2v"] Oct 13 13:33:13 crc kubenswrapper[4684]: I1013 13:33:13.194299 4684 scope.go:117] "RemoveContainer" containerID="e039c9176610846f9a0bcbc1b8acf99a2f9c86cf2fceae90fa665e75157f0a8c" Oct 13 13:33:13 crc kubenswrapper[4684]: I1013 13:33:13.227931 4684 scope.go:117] "RemoveContainer" containerID="e183f0d4f99df3521df9e8b9882b4ec8f40481d438c0c11cbf8af16bd74b2127" Oct 13 13:33:13 crc kubenswrapper[4684]: E1013 13:33:13.228541 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e183f0d4f99df3521df9e8b9882b4ec8f40481d438c0c11cbf8af16bd74b2127\": container with ID starting with e183f0d4f99df3521df9e8b9882b4ec8f40481d438c0c11cbf8af16bd74b2127 not found: ID does not exist" containerID="e183f0d4f99df3521df9e8b9882b4ec8f40481d438c0c11cbf8af16bd74b2127" Oct 13 13:33:13 crc kubenswrapper[4684]: I1013 13:33:13.228714 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e183f0d4f99df3521df9e8b9882b4ec8f40481d438c0c11cbf8af16bd74b2127"} err="failed to get container status \"e183f0d4f99df3521df9e8b9882b4ec8f40481d438c0c11cbf8af16bd74b2127\": rpc error: code = NotFound desc = could not find container \"e183f0d4f99df3521df9e8b9882b4ec8f40481d438c0c11cbf8af16bd74b2127\": container with ID starting with e183f0d4f99df3521df9e8b9882b4ec8f40481d438c0c11cbf8af16bd74b2127 not found: ID does not exist" Oct 13 13:33:13 crc kubenswrapper[4684]: I1013 13:33:13.228864 4684 scope.go:117] "RemoveContainer" containerID="9908e19a4d6bb99f4ffbc3af9d4c6cab1ee12ee0980a9cabdff55549c7aa1249" Oct 13 13:33:13 crc kubenswrapper[4684]: E1013 13:33:13.230031 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9908e19a4d6bb99f4ffbc3af9d4c6cab1ee12ee0980a9cabdff55549c7aa1249\": container with ID starting with 9908e19a4d6bb99f4ffbc3af9d4c6cab1ee12ee0980a9cabdff55549c7aa1249 not found: ID does not exist" containerID="9908e19a4d6bb99f4ffbc3af9d4c6cab1ee12ee0980a9cabdff55549c7aa1249" Oct 13 13:33:13 crc kubenswrapper[4684]: I1013 13:33:13.230106 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9908e19a4d6bb99f4ffbc3af9d4c6cab1ee12ee0980a9cabdff55549c7aa1249"} err="failed to get container status \"9908e19a4d6bb99f4ffbc3af9d4c6cab1ee12ee0980a9cabdff55549c7aa1249\": rpc error: code = NotFound desc = could not find container \"9908e19a4d6bb99f4ffbc3af9d4c6cab1ee12ee0980a9cabdff55549c7aa1249\": container with ID starting with 9908e19a4d6bb99f4ffbc3af9d4c6cab1ee12ee0980a9cabdff55549c7aa1249 not found: ID does not exist" Oct 13 13:33:13 crc kubenswrapper[4684]: I1013 13:33:13.230148 4684 scope.go:117] "RemoveContainer" containerID="e039c9176610846f9a0bcbc1b8acf99a2f9c86cf2fceae90fa665e75157f0a8c" Oct 13 13:33:13 crc kubenswrapper[4684]: E1013 13:33:13.230534 4684 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e039c9176610846f9a0bcbc1b8acf99a2f9c86cf2fceae90fa665e75157f0a8c\": container with ID starting with e039c9176610846f9a0bcbc1b8acf99a2f9c86cf2fceae90fa665e75157f0a8c not found: ID does not exist" containerID="e039c9176610846f9a0bcbc1b8acf99a2f9c86cf2fceae90fa665e75157f0a8c" Oct 13 13:33:13 crc kubenswrapper[4684]: I1013 13:33:13.230573 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e039c9176610846f9a0bcbc1b8acf99a2f9c86cf2fceae90fa665e75157f0a8c"} err="failed to get container status \"e039c9176610846f9a0bcbc1b8acf99a2f9c86cf2fceae90fa665e75157f0a8c\": rpc error: code = NotFound desc = could not find container \"e039c9176610846f9a0bcbc1b8acf99a2f9c86cf2fceae90fa665e75157f0a8c\": container with ID starting with e039c9176610846f9a0bcbc1b8acf99a2f9c86cf2fceae90fa665e75157f0a8c not found: ID does not exist" Oct 13 13:33:14 crc kubenswrapper[4684]: I1013 13:33:14.363858 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c045ada2-5b0b-47d5-941d-de7b20111580" path="/var/lib/kubelet/pods/c045ada2-5b0b-47d5-941d-de7b20111580/volumes" Oct 13 13:33:21 crc kubenswrapper[4684]: I1013 13:33:21.052807 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-6b5hs"] Oct 13 13:33:21 crc kubenswrapper[4684]: I1013 13:33:21.066208 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-6b5hs"] Oct 13 13:33:22 crc kubenswrapper[4684]: I1013 13:33:22.380004 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89604b0f-1f4f-440b-b779-a3d5fe0c4895" path="/var/lib/kubelet/pods/89604b0f-1f4f-440b-b779-a3d5fe0c4895/volumes" Oct 13 13:33:24 crc kubenswrapper[4684]: I1013 13:33:24.350835 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:33:24 crc kubenswrapper[4684]: E1013 13:33:24.351162 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:33:26 crc kubenswrapper[4684]: I1013 13:33:26.269141 4684 generic.go:334] "Generic (PLEG): container finished" podID="6c328864-8f33-4897-8fa7-9f0feee4fbf9" containerID="bea9365cd33a636f86511d9bafcd9b7eb2290efaf639765e2f35bc696f01fd0d" exitCode=0 Oct 13 13:33:26 crc kubenswrapper[4684]: I1013 13:33:26.269206 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" event={"ID":"6c328864-8f33-4897-8fa7-9f0feee4fbf9","Type":"ContainerDied","Data":"bea9365cd33a636f86511d9bafcd9b7eb2290efaf639765e2f35bc696f01fd0d"} Oct 13 13:33:27 crc kubenswrapper[4684]: I1013 13:33:27.625240 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" Oct 13 13:33:27 crc kubenswrapper[4684]: I1013 13:33:27.690556 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8tsf\" (UniqueName: \"kubernetes.io/projected/6c328864-8f33-4897-8fa7-9f0feee4fbf9-kube-api-access-j8tsf\") pod \"6c328864-8f33-4897-8fa7-9f0feee4fbf9\" (UID: \"6c328864-8f33-4897-8fa7-9f0feee4fbf9\") " Oct 13 13:33:27 crc kubenswrapper[4684]: I1013 13:33:27.690637 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6c328864-8f33-4897-8fa7-9f0feee4fbf9-inventory\") pod \"6c328864-8f33-4897-8fa7-9f0feee4fbf9\" (UID: \"6c328864-8f33-4897-8fa7-9f0feee4fbf9\") " Oct 13 13:33:27 crc kubenswrapper[4684]: I1013 13:33:27.690709 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6c328864-8f33-4897-8fa7-9f0feee4fbf9-ssh-key\") pod \"6c328864-8f33-4897-8fa7-9f0feee4fbf9\" (UID: \"6c328864-8f33-4897-8fa7-9f0feee4fbf9\") " Oct 13 13:33:27 crc kubenswrapper[4684]: I1013 13:33:27.697159 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c328864-8f33-4897-8fa7-9f0feee4fbf9-kube-api-access-j8tsf" (OuterVolumeSpecName: "kube-api-access-j8tsf") pod "6c328864-8f33-4897-8fa7-9f0feee4fbf9" (UID: "6c328864-8f33-4897-8fa7-9f0feee4fbf9"). InnerVolumeSpecName "kube-api-access-j8tsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:33:27 crc kubenswrapper[4684]: I1013 13:33:27.726277 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c328864-8f33-4897-8fa7-9f0feee4fbf9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6c328864-8f33-4897-8fa7-9f0feee4fbf9" (UID: "6c328864-8f33-4897-8fa7-9f0feee4fbf9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:33:27 crc kubenswrapper[4684]: I1013 13:33:27.736753 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c328864-8f33-4897-8fa7-9f0feee4fbf9-inventory" (OuterVolumeSpecName: "inventory") pod "6c328864-8f33-4897-8fa7-9f0feee4fbf9" (UID: "6c328864-8f33-4897-8fa7-9f0feee4fbf9"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:33:27 crc kubenswrapper[4684]: I1013 13:33:27.793254 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8tsf\" (UniqueName: \"kubernetes.io/projected/6c328864-8f33-4897-8fa7-9f0feee4fbf9-kube-api-access-j8tsf\") on node \"crc\" DevicePath \"\"" Oct 13 13:33:27 crc kubenswrapper[4684]: I1013 13:33:27.793300 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6c328864-8f33-4897-8fa7-9f0feee4fbf9-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:33:27 crc kubenswrapper[4684]: I1013 13:33:27.793313 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6c328864-8f33-4897-8fa7-9f0feee4fbf9-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.290531 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" event={"ID":"6c328864-8f33-4897-8fa7-9f0feee4fbf9","Type":"ContainerDied","Data":"ac1fc9d5e78ee31388e46374b904b6a9cebfe4c96d31191cd72c5170386bfe6d"} Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.290570 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-28bmr" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.290584 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac1fc9d5e78ee31388e46374b904b6a9cebfe4c96d31191cd72c5170386bfe6d" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.384673 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg"] Oct 13 13:33:28 crc kubenswrapper[4684]: E1013 13:33:28.385178 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c045ada2-5b0b-47d5-941d-de7b20111580" containerName="registry-server" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.385202 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="c045ada2-5b0b-47d5-941d-de7b20111580" containerName="registry-server" Oct 13 13:33:28 crc kubenswrapper[4684]: E1013 13:33:28.385228 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c045ada2-5b0b-47d5-941d-de7b20111580" containerName="extract-content" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.385241 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="c045ada2-5b0b-47d5-941d-de7b20111580" containerName="extract-content" Oct 13 13:33:28 crc kubenswrapper[4684]: E1013 13:33:28.385266 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c328864-8f33-4897-8fa7-9f0feee4fbf9" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.385275 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c328864-8f33-4897-8fa7-9f0feee4fbf9" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 13 13:33:28 crc kubenswrapper[4684]: E1013 13:33:28.385293 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c045ada2-5b0b-47d5-941d-de7b20111580" containerName="extract-utilities" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.385305 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="c045ada2-5b0b-47d5-941d-de7b20111580" containerName="extract-utilities" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.385545 4684 
memory_manager.go:354] "RemoveStaleState removing state" podUID="c045ada2-5b0b-47d5-941d-de7b20111580" containerName="registry-server" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.385573 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c328864-8f33-4897-8fa7-9f0feee4fbf9" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.386483 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.389807 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.390599 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.390852 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.392202 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.393456 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg"] Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.426135 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/58124363-4632-4dec-894c-b3c3c289a6f0-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c67wg\" (UID: \"58124363-4632-4dec-894c-b3c3c289a6f0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.426269 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58124363-4632-4dec-894c-b3c3c289a6f0-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c67wg\" (UID: \"58124363-4632-4dec-894c-b3c3c289a6f0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.426442 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zftmt\" (UniqueName: \"kubernetes.io/projected/58124363-4632-4dec-894c-b3c3c289a6f0-kube-api-access-zftmt\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c67wg\" (UID: \"58124363-4632-4dec-894c-b3c3c289a6f0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.527766 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zftmt\" (UniqueName: \"kubernetes.io/projected/58124363-4632-4dec-894c-b3c3c289a6f0-kube-api-access-zftmt\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c67wg\" (UID: \"58124363-4632-4dec-894c-b3c3c289a6f0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.527830 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/58124363-4632-4dec-894c-b3c3c289a6f0-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c67wg\" (UID: \"58124363-4632-4dec-894c-b3c3c289a6f0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.527882 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58124363-4632-4dec-894c-b3c3c289a6f0-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c67wg\" (UID: \"58124363-4632-4dec-894c-b3c3c289a6f0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.533068 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/58124363-4632-4dec-894c-b3c3c289a6f0-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c67wg\" (UID: \"58124363-4632-4dec-894c-b3c3c289a6f0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.533160 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58124363-4632-4dec-894c-b3c3c289a6f0-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c67wg\" (UID: \"58124363-4632-4dec-894c-b3c3c289a6f0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.556214 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zftmt\" (UniqueName: \"kubernetes.io/projected/58124363-4632-4dec-894c-b3c3c289a6f0-kube-api-access-zftmt\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-c67wg\" (UID: \"58124363-4632-4dec-894c-b3c3c289a6f0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" Oct 13 13:33:28 crc kubenswrapper[4684]: I1013 13:33:28.728224 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" Oct 13 13:33:29 crc kubenswrapper[4684]: I1013 13:33:29.041626 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-5qfl5"] Oct 13 13:33:29 crc kubenswrapper[4684]: I1013 13:33:29.055381 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-77dxj"] Oct 13 13:33:29 crc kubenswrapper[4684]: I1013 13:33:29.063559 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-hjlvf"] Oct 13 13:33:29 crc kubenswrapper[4684]: I1013 13:33:29.071496 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-77dxj"] Oct 13 13:33:29 crc kubenswrapper[4684]: I1013 13:33:29.079060 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-5qfl5"] Oct 13 13:33:29 crc kubenswrapper[4684]: I1013 13:33:29.086567 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-hjlvf"] Oct 13 13:33:29 crc kubenswrapper[4684]: I1013 13:33:29.312805 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg"] Oct 13 13:33:30 crc kubenswrapper[4684]: I1013 13:33:30.313235 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" event={"ID":"58124363-4632-4dec-894c-b3c3c289a6f0","Type":"ContainerStarted","Data":"d725bd6b4b61f24e14fc5cfc39bd5ac2e4b7a8b84538f66e8959156d0f8dd01c"} Oct 13 13:33:30 crc kubenswrapper[4684]: I1013 13:33:30.313503 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" event={"ID":"58124363-4632-4dec-894c-b3c3c289a6f0","Type":"ContainerStarted","Data":"fa51ea309b11ba6c67a6323a94863521a4b18a1a6d6f6feb527e14d3cf226213"} Oct 13 13:33:30 crc kubenswrapper[4684]: I1013 13:33:30.332296 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" podStartSLOduration=1.88274512 podStartE2EDuration="2.332276241s" podCreationTimestamp="2025-10-13 13:33:28 +0000 UTC" firstStartedPulling="2025-10-13 13:33:29.316735784 +0000 UTC m=+1563.884119854" lastFinishedPulling="2025-10-13 13:33:29.766266905 +0000 UTC m=+1564.333650975" observedRunningTime="2025-10-13 13:33:30.329574557 +0000 UTC m=+1564.896958647" watchObservedRunningTime="2025-10-13 13:33:30.332276241 +0000 UTC m=+1564.899660311" Oct 13 13:33:30 crc kubenswrapper[4684]: I1013 13:33:30.365149 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54" path="/var/lib/kubelet/pods/2e21b744-e8f6-4b2d-bf9d-aeb8e3ab4d54/volumes" Oct 13 13:33:30 crc kubenswrapper[4684]: I1013 13:33:30.367472 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4" path="/var/lib/kubelet/pods/6fd2f1bd-d9a0-4e63-8cba-a5a0225a13d4/volumes" Oct 13 13:33:30 crc kubenswrapper[4684]: I1013 13:33:30.368674 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f47ea9a-7261-4645-bcd9-7abf500d9501" path="/var/lib/kubelet/pods/8f47ea9a-7261-4645-bcd9-7abf500d9501/volumes" Oct 13 13:33:38 crc kubenswrapper[4684]: I1013 13:33:38.351209 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:33:38 crc 
kubenswrapper[4684]: E1013 13:33:38.352552 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:33:44 crc kubenswrapper[4684]: I1013 13:33:44.055646 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-qprxl"] Oct 13 13:33:44 crc kubenswrapper[4684]: I1013 13:33:44.065606 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-qprxl"] Oct 13 13:33:44 crc kubenswrapper[4684]: I1013 13:33:44.363594 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f3242f8-a0ba-4799-bd79-a0523603fb37" path="/var/lib/kubelet/pods/5f3242f8-a0ba-4799-bd79-a0523603fb37/volumes" Oct 13 13:33:48 crc kubenswrapper[4684]: I1013 13:33:48.907434 4684 scope.go:117] "RemoveContainer" containerID="b56c2d9ac88be19528c7691e4ec0b8289c36b013eeeaf78f989b9f1886313aa0" Oct 13 13:33:48 crc kubenswrapper[4684]: I1013 13:33:48.969731 4684 scope.go:117] "RemoveContainer" containerID="d86bba669b24e0efb4e75b5366b6906169a0aa4f232362b9c8e2f7ddfb6f833c" Oct 13 13:33:49 crc kubenswrapper[4684]: I1013 13:33:49.011221 4684 scope.go:117] "RemoveContainer" containerID="78ef1075f0458c623839fab524432d158f3df5c137f0212d7563108e2dca1f9c" Oct 13 13:33:49 crc kubenswrapper[4684]: I1013 13:33:49.037322 4684 scope.go:117] "RemoveContainer" containerID="f9469b06a0908d38711c53628bf743fb0c21a42e676091318f92f6db34e196c8" Oct 13 13:33:49 crc kubenswrapper[4684]: I1013 13:33:49.095440 4684 scope.go:117] "RemoveContainer" containerID="feb471e2132c1c04f1860174ba3ca61ca7f8301e1c27f216194bd0e88736e020" Oct 13 13:33:49 crc kubenswrapper[4684]: I1013 13:33:49.141873 4684 scope.go:117] "RemoveContainer" containerID="35c7e4e5887a719c3beddb46adb9bfc2847e91479b1f2a6ac878efe464b2b6df" Oct 13 13:33:49 crc kubenswrapper[4684]: I1013 13:33:49.183857 4684 scope.go:117] "RemoveContainer" containerID="ab3d5694d17c09d8f2464b90164a56fddc9fea25a825a0065c1309113c0a918d" Oct 13 13:33:49 crc kubenswrapper[4684]: I1013 13:33:49.222053 4684 scope.go:117] "RemoveContainer" containerID="829f092c2099c4972c9b40e38d85b3b258e27eb96fed5f3b889b333dd7427aed" Oct 13 13:33:49 crc kubenswrapper[4684]: I1013 13:33:49.253147 4684 scope.go:117] "RemoveContainer" containerID="3ea011a28c8ae539d6f2995a5c117d74dcb59518f0e6b77df5f2f8b8696b1478" Oct 13 13:33:49 crc kubenswrapper[4684]: I1013 13:33:49.304104 4684 scope.go:117] "RemoveContainer" containerID="b46416504f1b586dc2bb73f9658247786c100b6ec2e7f35fd88ac238039e50f5" Oct 13 13:33:49 crc kubenswrapper[4684]: I1013 13:33:49.350836 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:33:49 crc kubenswrapper[4684]: E1013 13:33:49.351673 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:34:02 crc kubenswrapper[4684]: I1013 
13:34:02.350741 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:34:02 crc kubenswrapper[4684]: E1013 13:34:02.352750 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:34:13 crc kubenswrapper[4684]: I1013 13:34:13.353292 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:34:13 crc kubenswrapper[4684]: E1013 13:34:13.354680 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:34:25 crc kubenswrapper[4684]: I1013 13:34:25.351162 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:34:25 crc kubenswrapper[4684]: E1013 13:34:25.351832 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:34:31 crc kubenswrapper[4684]: I1013 13:34:31.047492 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-8tjn6"] Oct 13 13:34:31 crc kubenswrapper[4684]: I1013 13:34:31.060160 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-6hd9g"] Oct 13 13:34:31 crc kubenswrapper[4684]: I1013 13:34:31.071992 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-c2k7b"] Oct 13 13:34:31 crc kubenswrapper[4684]: I1013 13:34:31.078065 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-c2k7b"] Oct 13 13:34:31 crc kubenswrapper[4684]: I1013 13:34:31.086641 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-8tjn6"] Oct 13 13:34:31 crc kubenswrapper[4684]: I1013 13:34:31.092979 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-6hd9g"] Oct 13 13:34:32 crc kubenswrapper[4684]: I1013 13:34:32.364308 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="294e80af-0da4-4813-967e-9b972e56a5e2" path="/var/lib/kubelet/pods/294e80af-0da4-4813-967e-9b972e56a5e2/volumes" Oct 13 13:34:32 crc kubenswrapper[4684]: I1013 13:34:32.365448 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5d8d6f8-445c-44fc-bf63-ffb2b676a661" path="/var/lib/kubelet/pods/b5d8d6f8-445c-44fc-bf63-ffb2b676a661/volumes" Oct 13 13:34:32 crc kubenswrapper[4684]: I1013 13:34:32.366160 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="e05a9c82-2148-4d4a-838d-3b4c98faff04" path="/var/lib/kubelet/pods/e05a9c82-2148-4d4a-838d-3b4c98faff04/volumes" Oct 13 13:34:37 crc kubenswrapper[4684]: I1013 13:34:37.350732 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:34:37 crc kubenswrapper[4684]: E1013 13:34:37.351405 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:34:41 crc kubenswrapper[4684]: I1013 13:34:41.037611 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-608c-account-create-p4w5f"] Oct 13 13:34:41 crc kubenswrapper[4684]: I1013 13:34:41.050507 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-dd65-account-create-c2w2g"] Oct 13 13:34:41 crc kubenswrapper[4684]: I1013 13:34:41.061703 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-dd65-account-create-c2w2g"] Oct 13 13:34:41 crc kubenswrapper[4684]: I1013 13:34:41.070941 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-608c-account-create-p4w5f"] Oct 13 13:34:42 crc kubenswrapper[4684]: I1013 13:34:42.025578 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-0354-account-create-995tj"] Oct 13 13:34:42 crc kubenswrapper[4684]: I1013 13:34:42.033527 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-0354-account-create-995tj"] Oct 13 13:34:42 crc kubenswrapper[4684]: I1013 13:34:42.367333 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b2d5063-943d-4cea-b1cd-fc841e23d897" path="/var/lib/kubelet/pods/6b2d5063-943d-4cea-b1cd-fc841e23d897/volumes" Oct 13 13:34:42 crc kubenswrapper[4684]: I1013 13:34:42.368238 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75fd1cdd-9db0-47d5-be3b-874bea0755b8" path="/var/lib/kubelet/pods/75fd1cdd-9db0-47d5-be3b-874bea0755b8/volumes" Oct 13 13:34:42 crc kubenswrapper[4684]: I1013 13:34:42.368783 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3904d99-f791-4d1d-b518-c5e367fa4d39" path="/var/lib/kubelet/pods/b3904d99-f791-4d1d-b518-c5e367fa4d39/volumes" Oct 13 13:34:44 crc kubenswrapper[4684]: I1013 13:34:44.086152 4684 generic.go:334] "Generic (PLEG): container finished" podID="58124363-4632-4dec-894c-b3c3c289a6f0" containerID="d725bd6b4b61f24e14fc5cfc39bd5ac2e4b7a8b84538f66e8959156d0f8dd01c" exitCode=0 Oct 13 13:34:44 crc kubenswrapper[4684]: I1013 13:34:44.086277 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" event={"ID":"58124363-4632-4dec-894c-b3c3c289a6f0","Type":"ContainerDied","Data":"d725bd6b4b61f24e14fc5cfc39bd5ac2e4b7a8b84538f66e8959156d0f8dd01c"} Oct 13 13:34:45 crc kubenswrapper[4684]: I1013 13:34:45.504707 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" Oct 13 13:34:45 crc kubenswrapper[4684]: I1013 13:34:45.672549 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zftmt\" (UniqueName: \"kubernetes.io/projected/58124363-4632-4dec-894c-b3c3c289a6f0-kube-api-access-zftmt\") pod \"58124363-4632-4dec-894c-b3c3c289a6f0\" (UID: \"58124363-4632-4dec-894c-b3c3c289a6f0\") " Oct 13 13:34:45 crc kubenswrapper[4684]: I1013 13:34:45.672746 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58124363-4632-4dec-894c-b3c3c289a6f0-inventory\") pod \"58124363-4632-4dec-894c-b3c3c289a6f0\" (UID: \"58124363-4632-4dec-894c-b3c3c289a6f0\") " Oct 13 13:34:45 crc kubenswrapper[4684]: I1013 13:34:45.673343 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/58124363-4632-4dec-894c-b3c3c289a6f0-ssh-key\") pod \"58124363-4632-4dec-894c-b3c3c289a6f0\" (UID: \"58124363-4632-4dec-894c-b3c3c289a6f0\") " Oct 13 13:34:45 crc kubenswrapper[4684]: I1013 13:34:45.680563 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58124363-4632-4dec-894c-b3c3c289a6f0-kube-api-access-zftmt" (OuterVolumeSpecName: "kube-api-access-zftmt") pod "58124363-4632-4dec-894c-b3c3c289a6f0" (UID: "58124363-4632-4dec-894c-b3c3c289a6f0"). InnerVolumeSpecName "kube-api-access-zftmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:34:45 crc kubenswrapper[4684]: I1013 13:34:45.720250 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58124363-4632-4dec-894c-b3c3c289a6f0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "58124363-4632-4dec-894c-b3c3c289a6f0" (UID: "58124363-4632-4dec-894c-b3c3c289a6f0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:34:45 crc kubenswrapper[4684]: I1013 13:34:45.746210 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58124363-4632-4dec-894c-b3c3c289a6f0-inventory" (OuterVolumeSpecName: "inventory") pod "58124363-4632-4dec-894c-b3c3c289a6f0" (UID: "58124363-4632-4dec-894c-b3c3c289a6f0"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:34:45 crc kubenswrapper[4684]: I1013 13:34:45.775237 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58124363-4632-4dec-894c-b3c3c289a6f0-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:34:45 crc kubenswrapper[4684]: I1013 13:34:45.775347 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/58124363-4632-4dec-894c-b3c3c289a6f0-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:34:45 crc kubenswrapper[4684]: I1013 13:34:45.775402 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zftmt\" (UniqueName: \"kubernetes.io/projected/58124363-4632-4dec-894c-b3c3c289a6f0-kube-api-access-zftmt\") on node \"crc\" DevicePath \"\"" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.107422 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" event={"ID":"58124363-4632-4dec-894c-b3c3c289a6f0","Type":"ContainerDied","Data":"fa51ea309b11ba6c67a6323a94863521a4b18a1a6d6f6feb527e14d3cf226213"} Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.107474 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa51ea309b11ba6c67a6323a94863521a4b18a1a6d6f6feb527e14d3cf226213" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.107515 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-c67wg" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.255692 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx"] Oct 13 13:34:46 crc kubenswrapper[4684]: E1013 13:34:46.256372 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58124363-4632-4dec-894c-b3c3c289a6f0" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.256393 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="58124363-4632-4dec-894c-b3c3c289a6f0" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.256749 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="58124363-4632-4dec-894c-b3c3c289a6f0" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.257569 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.259516 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.259696 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.259818 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.260484 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.276330 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx"] Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.290064 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12fac5a6-e3fd-4017-9eee-36aa43193b0c-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx\" (UID: \"12fac5a6-e3fd-4017-9eee-36aa43193b0c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.290170 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12fac5a6-e3fd-4017-9eee-36aa43193b0c-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx\" (UID: \"12fac5a6-e3fd-4017-9eee-36aa43193b0c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.290297 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkz74\" (UniqueName: \"kubernetes.io/projected/12fac5a6-e3fd-4017-9eee-36aa43193b0c-kube-api-access-kkz74\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx\" (UID: \"12fac5a6-e3fd-4017-9eee-36aa43193b0c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.366830 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tp972"] Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.369219 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.376153 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tp972"] Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.397545 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqkpd\" (UniqueName: \"kubernetes.io/projected/5ee75bd4-322f-417a-bec1-964d5d164ca1-kube-api-access-pqkpd\") pod \"redhat-operators-tp972\" (UID: \"5ee75bd4-322f-417a-bec1-964d5d164ca1\") " pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.398355 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ee75bd4-322f-417a-bec1-964d5d164ca1-utilities\") pod \"redhat-operators-tp972\" (UID: \"5ee75bd4-322f-417a-bec1-964d5d164ca1\") " pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.398523 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12fac5a6-e3fd-4017-9eee-36aa43193b0c-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx\" (UID: \"12fac5a6-e3fd-4017-9eee-36aa43193b0c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.398668 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12fac5a6-e3fd-4017-9eee-36aa43193b0c-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx\" (UID: \"12fac5a6-e3fd-4017-9eee-36aa43193b0c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.398739 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ee75bd4-322f-417a-bec1-964d5d164ca1-catalog-content\") pod \"redhat-operators-tp972\" (UID: \"5ee75bd4-322f-417a-bec1-964d5d164ca1\") " pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.398865 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkz74\" (UniqueName: \"kubernetes.io/projected/12fac5a6-e3fd-4017-9eee-36aa43193b0c-kube-api-access-kkz74\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx\" (UID: \"12fac5a6-e3fd-4017-9eee-36aa43193b0c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.410085 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12fac5a6-e3fd-4017-9eee-36aa43193b0c-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx\" (UID: \"12fac5a6-e3fd-4017-9eee-36aa43193b0c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.410085 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12fac5a6-e3fd-4017-9eee-36aa43193b0c-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx\" (UID: 
\"12fac5a6-e3fd-4017-9eee-36aa43193b0c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.437670 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkz74\" (UniqueName: \"kubernetes.io/projected/12fac5a6-e3fd-4017-9eee-36aa43193b0c-kube-api-access-kkz74\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx\" (UID: \"12fac5a6-e3fd-4017-9eee-36aa43193b0c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.501061 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqkpd\" (UniqueName: \"kubernetes.io/projected/5ee75bd4-322f-417a-bec1-964d5d164ca1-kube-api-access-pqkpd\") pod \"redhat-operators-tp972\" (UID: \"5ee75bd4-322f-417a-bec1-964d5d164ca1\") " pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.501130 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ee75bd4-322f-417a-bec1-964d5d164ca1-utilities\") pod \"redhat-operators-tp972\" (UID: \"5ee75bd4-322f-417a-bec1-964d5d164ca1\") " pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.501223 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ee75bd4-322f-417a-bec1-964d5d164ca1-catalog-content\") pod \"redhat-operators-tp972\" (UID: \"5ee75bd4-322f-417a-bec1-964d5d164ca1\") " pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.501733 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ee75bd4-322f-417a-bec1-964d5d164ca1-utilities\") pod \"redhat-operators-tp972\" (UID: \"5ee75bd4-322f-417a-bec1-964d5d164ca1\") " pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.501791 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ee75bd4-322f-417a-bec1-964d5d164ca1-catalog-content\") pod \"redhat-operators-tp972\" (UID: \"5ee75bd4-322f-417a-bec1-964d5d164ca1\") " pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.517029 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqkpd\" (UniqueName: \"kubernetes.io/projected/5ee75bd4-322f-417a-bec1-964d5d164ca1-kube-api-access-pqkpd\") pod \"redhat-operators-tp972\" (UID: \"5ee75bd4-322f-417a-bec1-964d5d164ca1\") " pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.577181 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" Oct 13 13:34:46 crc kubenswrapper[4684]: I1013 13:34:46.814024 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:34:47 crc kubenswrapper[4684]: I1013 13:34:47.118699 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx"] Oct 13 13:34:47 crc kubenswrapper[4684]: I1013 13:34:47.259674 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tp972"] Oct 13 13:34:48 crc kubenswrapper[4684]: I1013 13:34:48.130524 4684 generic.go:334] "Generic (PLEG): container finished" podID="5ee75bd4-322f-417a-bec1-964d5d164ca1" containerID="125ab5acec4e3122474d78518d7e0250c2f060cd1cab18149abca4c47169f2b5" exitCode=0 Oct 13 13:34:48 crc kubenswrapper[4684]: I1013 13:34:48.130614 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tp972" event={"ID":"5ee75bd4-322f-417a-bec1-964d5d164ca1","Type":"ContainerDied","Data":"125ab5acec4e3122474d78518d7e0250c2f060cd1cab18149abca4c47169f2b5"} Oct 13 13:34:48 crc kubenswrapper[4684]: I1013 13:34:48.131017 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tp972" event={"ID":"5ee75bd4-322f-417a-bec1-964d5d164ca1","Type":"ContainerStarted","Data":"9b75bb018d4c8090781a76291327d708aa5f7ce7fca4fbd6e6c8db0f40e877bf"} Oct 13 13:34:48 crc kubenswrapper[4684]: I1013 13:34:48.133288 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" event={"ID":"12fac5a6-e3fd-4017-9eee-36aa43193b0c","Type":"ContainerStarted","Data":"d72311b6cd9fdd3b68ca2e19d59072f85ac4c281690c42f5972a91b090fd3164"} Oct 13 13:34:48 crc kubenswrapper[4684]: I1013 13:34:48.133317 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" event={"ID":"12fac5a6-e3fd-4017-9eee-36aa43193b0c","Type":"ContainerStarted","Data":"fad8fb6f0f30c742b579419f2d416a3833ff71d8f5df2fd90e5f192ae5e0ee2c"} Oct 13 13:34:48 crc kubenswrapper[4684]: I1013 13:34:48.189843 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" podStartSLOduration=1.583143632 podStartE2EDuration="2.189812569s" podCreationTimestamp="2025-10-13 13:34:46 +0000 UTC" firstStartedPulling="2025-10-13 13:34:47.127418462 +0000 UTC m=+1641.694802532" lastFinishedPulling="2025-10-13 13:34:47.734087389 +0000 UTC m=+1642.301471469" observedRunningTime="2025-10-13 13:34:48.183453343 +0000 UTC m=+1642.750837473" watchObservedRunningTime="2025-10-13 13:34:48.189812569 +0000 UTC m=+1642.757196679" Oct 13 13:34:48 crc kubenswrapper[4684]: I1013 13:34:48.353433 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:34:48 crc kubenswrapper[4684]: E1013 13:34:48.353790 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:34:49 crc kubenswrapper[4684]: I1013 13:34:49.147242 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tp972" 
event={"ID":"5ee75bd4-322f-417a-bec1-964d5d164ca1","Type":"ContainerStarted","Data":"f571a867c98432b91606f9e611f3b672846b1bf0300016e9f24a51d42f41f7d8"} Oct 13 13:34:49 crc kubenswrapper[4684]: I1013 13:34:49.556543 4684 scope.go:117] "RemoveContainer" containerID="6d4677711d353c7285a580a77e5224b723257536c9d43c00b82286a3b4705ad9" Oct 13 13:34:49 crc kubenswrapper[4684]: I1013 13:34:49.585670 4684 scope.go:117] "RemoveContainer" containerID="3e064b130b66e8b0bb2e506a5e2f540aae5436596f9e030fb8b2a22edde25736" Oct 13 13:34:49 crc kubenswrapper[4684]: I1013 13:34:49.631651 4684 scope.go:117] "RemoveContainer" containerID="86d228b77a0ec0e333bcf0f082841ca4b49ea3c25006705656853854e4a167d0" Oct 13 13:34:49 crc kubenswrapper[4684]: I1013 13:34:49.674837 4684 scope.go:117] "RemoveContainer" containerID="baf6392375a2875bf926e1581663f0db5c20a74bd07a88dfa31f292d9ff8d64a" Oct 13 13:34:49 crc kubenswrapper[4684]: I1013 13:34:49.716663 4684 scope.go:117] "RemoveContainer" containerID="79a56616c82199d8db3ddc0e9c6c9f8f3624c72ddc74c92e668b484a0e099cc7" Oct 13 13:34:49 crc kubenswrapper[4684]: I1013 13:34:49.769800 4684 scope.go:117] "RemoveContainer" containerID="4e166dc4fd3f0fa6b6f665a1a176ba438a307d5a6e4a3c353272e4b4a790284e" Oct 13 13:34:50 crc kubenswrapper[4684]: I1013 13:34:50.162002 4684 generic.go:334] "Generic (PLEG): container finished" podID="5ee75bd4-322f-417a-bec1-964d5d164ca1" containerID="f571a867c98432b91606f9e611f3b672846b1bf0300016e9f24a51d42f41f7d8" exitCode=0 Oct 13 13:34:50 crc kubenswrapper[4684]: I1013 13:34:50.162067 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tp972" event={"ID":"5ee75bd4-322f-417a-bec1-964d5d164ca1","Type":"ContainerDied","Data":"f571a867c98432b91606f9e611f3b672846b1bf0300016e9f24a51d42f41f7d8"} Oct 13 13:34:52 crc kubenswrapper[4684]: I1013 13:34:52.187321 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tp972" event={"ID":"5ee75bd4-322f-417a-bec1-964d5d164ca1","Type":"ContainerStarted","Data":"966dffaa107d84f3f99942739aa1b6cfd35ba16abca07bf0c8c57ec3ced2737d"} Oct 13 13:34:52 crc kubenswrapper[4684]: I1013 13:34:52.208054 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tp972" podStartSLOduration=3.177357422 podStartE2EDuration="6.208028068s" podCreationTimestamp="2025-10-13 13:34:46 +0000 UTC" firstStartedPulling="2025-10-13 13:34:48.133404432 +0000 UTC m=+1642.700788502" lastFinishedPulling="2025-10-13 13:34:51.164075078 +0000 UTC m=+1645.731459148" observedRunningTime="2025-10-13 13:34:52.206040487 +0000 UTC m=+1646.773424597" watchObservedRunningTime="2025-10-13 13:34:52.208028068 +0000 UTC m=+1646.775412158" Oct 13 13:34:53 crc kubenswrapper[4684]: I1013 13:34:53.197961 4684 generic.go:334] "Generic (PLEG): container finished" podID="12fac5a6-e3fd-4017-9eee-36aa43193b0c" containerID="d72311b6cd9fdd3b68ca2e19d59072f85ac4c281690c42f5972a91b090fd3164" exitCode=0 Oct 13 13:34:53 crc kubenswrapper[4684]: I1013 13:34:53.198033 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" event={"ID":"12fac5a6-e3fd-4017-9eee-36aa43193b0c","Type":"ContainerDied","Data":"d72311b6cd9fdd3b68ca2e19d59072f85ac4c281690c42f5972a91b090fd3164"} Oct 13 13:34:54 crc kubenswrapper[4684]: I1013 13:34:54.653708 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" Oct 13 13:34:54 crc kubenswrapper[4684]: I1013 13:34:54.781296 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12fac5a6-e3fd-4017-9eee-36aa43193b0c-ssh-key\") pod \"12fac5a6-e3fd-4017-9eee-36aa43193b0c\" (UID: \"12fac5a6-e3fd-4017-9eee-36aa43193b0c\") " Oct 13 13:34:54 crc kubenswrapper[4684]: I1013 13:34:54.781642 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12fac5a6-e3fd-4017-9eee-36aa43193b0c-inventory\") pod \"12fac5a6-e3fd-4017-9eee-36aa43193b0c\" (UID: \"12fac5a6-e3fd-4017-9eee-36aa43193b0c\") " Oct 13 13:34:54 crc kubenswrapper[4684]: I1013 13:34:54.781696 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkz74\" (UniqueName: \"kubernetes.io/projected/12fac5a6-e3fd-4017-9eee-36aa43193b0c-kube-api-access-kkz74\") pod \"12fac5a6-e3fd-4017-9eee-36aa43193b0c\" (UID: \"12fac5a6-e3fd-4017-9eee-36aa43193b0c\") " Oct 13 13:34:54 crc kubenswrapper[4684]: I1013 13:34:54.790099 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12fac5a6-e3fd-4017-9eee-36aa43193b0c-kube-api-access-kkz74" (OuterVolumeSpecName: "kube-api-access-kkz74") pod "12fac5a6-e3fd-4017-9eee-36aa43193b0c" (UID: "12fac5a6-e3fd-4017-9eee-36aa43193b0c"). InnerVolumeSpecName "kube-api-access-kkz74". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:34:54 crc kubenswrapper[4684]: I1013 13:34:54.818992 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12fac5a6-e3fd-4017-9eee-36aa43193b0c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "12fac5a6-e3fd-4017-9eee-36aa43193b0c" (UID: "12fac5a6-e3fd-4017-9eee-36aa43193b0c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:34:54 crc kubenswrapper[4684]: I1013 13:34:54.826509 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12fac5a6-e3fd-4017-9eee-36aa43193b0c-inventory" (OuterVolumeSpecName: "inventory") pod "12fac5a6-e3fd-4017-9eee-36aa43193b0c" (UID: "12fac5a6-e3fd-4017-9eee-36aa43193b0c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:34:54 crc kubenswrapper[4684]: I1013 13:34:54.884096 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12fac5a6-e3fd-4017-9eee-36aa43193b0c-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:34:54 crc kubenswrapper[4684]: I1013 13:34:54.884143 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12fac5a6-e3fd-4017-9eee-36aa43193b0c-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:34:54 crc kubenswrapper[4684]: I1013 13:34:54.884163 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkz74\" (UniqueName: \"kubernetes.io/projected/12fac5a6-e3fd-4017-9eee-36aa43193b0c-kube-api-access-kkz74\") on node \"crc\" DevicePath \"\"" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.214329 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" event={"ID":"12fac5a6-e3fd-4017-9eee-36aa43193b0c","Type":"ContainerDied","Data":"fad8fb6f0f30c742b579419f2d416a3833ff71d8f5df2fd90e5f192ae5e0ee2c"} Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.214364 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fad8fb6f0f30c742b579419f2d416a3833ff71d8f5df2fd90e5f192ae5e0ee2c" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.214395 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.295786 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x"] Oct 13 13:34:55 crc kubenswrapper[4684]: E1013 13:34:55.296209 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12fac5a6-e3fd-4017-9eee-36aa43193b0c" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.296226 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="12fac5a6-e3fd-4017-9eee-36aa43193b0c" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.296405 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="12fac5a6-e3fd-4017-9eee-36aa43193b0c" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.297070 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.301396 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.301853 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.314502 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.314922 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x"] Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.314943 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.496351 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kx22x\" (UID: \"aabcf2cf-0d17-4864-a8f6-55220ed4c45c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.496436 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bqq5\" (UniqueName: \"kubernetes.io/projected/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-kube-api-access-2bqq5\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kx22x\" (UID: \"aabcf2cf-0d17-4864-a8f6-55220ed4c45c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.496460 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kx22x\" (UID: \"aabcf2cf-0d17-4864-a8f6-55220ed4c45c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.598191 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kx22x\" (UID: \"aabcf2cf-0d17-4864-a8f6-55220ed4c45c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.598508 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bqq5\" (UniqueName: \"kubernetes.io/projected/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-kube-api-access-2bqq5\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kx22x\" (UID: \"aabcf2cf-0d17-4864-a8f6-55220ed4c45c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.598629 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kx22x\" (UID: 
\"aabcf2cf-0d17-4864-a8f6-55220ed4c45c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.602615 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kx22x\" (UID: \"aabcf2cf-0d17-4864-a8f6-55220ed4c45c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.604721 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kx22x\" (UID: \"aabcf2cf-0d17-4864-a8f6-55220ed4c45c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.623840 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bqq5\" (UniqueName: \"kubernetes.io/projected/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-kube-api-access-2bqq5\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-kx22x\" (UID: \"aabcf2cf-0d17-4864-a8f6-55220ed4c45c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" Oct 13 13:34:55 crc kubenswrapper[4684]: I1013 13:34:55.913475 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" Oct 13 13:34:56 crc kubenswrapper[4684]: I1013 13:34:56.488220 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x"] Oct 13 13:34:56 crc kubenswrapper[4684]: I1013 13:34:56.814824 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:34:56 crc kubenswrapper[4684]: I1013 13:34:56.814932 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:34:56 crc kubenswrapper[4684]: I1013 13:34:56.870886 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:34:57 crc kubenswrapper[4684]: I1013 13:34:57.236068 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" event={"ID":"aabcf2cf-0d17-4864-a8f6-55220ed4c45c","Type":"ContainerStarted","Data":"27f1db3d026fa1295d1d32fd326ef3edd75bbce0aeb628ed14d1cfd49164677c"} Oct 13 13:34:57 crc kubenswrapper[4684]: I1013 13:34:57.236120 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" event={"ID":"aabcf2cf-0d17-4864-a8f6-55220ed4c45c","Type":"ContainerStarted","Data":"cb66d18bca9b816ef3ba11cfde381a41492a6cb2f4d57be4797c7e864c544ec7"} Oct 13 13:34:57 crc kubenswrapper[4684]: I1013 13:34:57.254507 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" podStartSLOduration=1.8119537989999999 podStartE2EDuration="2.254488993s" podCreationTimestamp="2025-10-13 13:34:55 +0000 UTC" firstStartedPulling="2025-10-13 13:34:56.499464268 +0000 UTC m=+1651.066848338" lastFinishedPulling="2025-10-13 13:34:56.941999462 +0000 UTC m=+1651.509383532" observedRunningTime="2025-10-13 
13:34:57.248096567 +0000 UTC m=+1651.815480627" watchObservedRunningTime="2025-10-13 13:34:57.254488993 +0000 UTC m=+1651.821873073" Oct 13 13:34:57 crc kubenswrapper[4684]: I1013 13:34:57.282464 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:34:57 crc kubenswrapper[4684]: I1013 13:34:57.332155 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tp972"] Oct 13 13:34:59 crc kubenswrapper[4684]: I1013 13:34:59.256318 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tp972" podUID="5ee75bd4-322f-417a-bec1-964d5d164ca1" containerName="registry-server" containerID="cri-o://966dffaa107d84f3f99942739aa1b6cfd35ba16abca07bf0c8c57ec3ced2737d" gracePeriod=2 Oct 13 13:34:59 crc kubenswrapper[4684]: I1013 13:34:59.741853 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:34:59 crc kubenswrapper[4684]: I1013 13:34:59.881653 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqkpd\" (UniqueName: \"kubernetes.io/projected/5ee75bd4-322f-417a-bec1-964d5d164ca1-kube-api-access-pqkpd\") pod \"5ee75bd4-322f-417a-bec1-964d5d164ca1\" (UID: \"5ee75bd4-322f-417a-bec1-964d5d164ca1\") " Oct 13 13:34:59 crc kubenswrapper[4684]: I1013 13:34:59.882102 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ee75bd4-322f-417a-bec1-964d5d164ca1-utilities\") pod \"5ee75bd4-322f-417a-bec1-964d5d164ca1\" (UID: \"5ee75bd4-322f-417a-bec1-964d5d164ca1\") " Oct 13 13:34:59 crc kubenswrapper[4684]: I1013 13:34:59.882420 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ee75bd4-322f-417a-bec1-964d5d164ca1-catalog-content\") pod \"5ee75bd4-322f-417a-bec1-964d5d164ca1\" (UID: \"5ee75bd4-322f-417a-bec1-964d5d164ca1\") " Oct 13 13:34:59 crc kubenswrapper[4684]: I1013 13:34:59.883358 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ee75bd4-322f-417a-bec1-964d5d164ca1-utilities" (OuterVolumeSpecName: "utilities") pod "5ee75bd4-322f-417a-bec1-964d5d164ca1" (UID: "5ee75bd4-322f-417a-bec1-964d5d164ca1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:34:59 crc kubenswrapper[4684]: I1013 13:34:59.886225 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ee75bd4-322f-417a-bec1-964d5d164ca1-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:34:59 crc kubenswrapper[4684]: I1013 13:34:59.889969 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ee75bd4-322f-417a-bec1-964d5d164ca1-kube-api-access-pqkpd" (OuterVolumeSpecName: "kube-api-access-pqkpd") pod "5ee75bd4-322f-417a-bec1-964d5d164ca1" (UID: "5ee75bd4-322f-417a-bec1-964d5d164ca1"). InnerVolumeSpecName "kube-api-access-pqkpd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:34:59 crc kubenswrapper[4684]: I1013 13:34:59.978365 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ee75bd4-322f-417a-bec1-964d5d164ca1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5ee75bd4-322f-417a-bec1-964d5d164ca1" (UID: "5ee75bd4-322f-417a-bec1-964d5d164ca1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:34:59 crc kubenswrapper[4684]: I1013 13:34:59.988266 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ee75bd4-322f-417a-bec1-964d5d164ca1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:34:59 crc kubenswrapper[4684]: I1013 13:34:59.988307 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqkpd\" (UniqueName: \"kubernetes.io/projected/5ee75bd4-322f-417a-bec1-964d5d164ca1-kube-api-access-pqkpd\") on node \"crc\" DevicePath \"\"" Oct 13 13:35:00 crc kubenswrapper[4684]: I1013 13:35:00.266917 4684 generic.go:334] "Generic (PLEG): container finished" podID="5ee75bd4-322f-417a-bec1-964d5d164ca1" containerID="966dffaa107d84f3f99942739aa1b6cfd35ba16abca07bf0c8c57ec3ced2737d" exitCode=0 Oct 13 13:35:00 crc kubenswrapper[4684]: I1013 13:35:00.266970 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tp972" event={"ID":"5ee75bd4-322f-417a-bec1-964d5d164ca1","Type":"ContainerDied","Data":"966dffaa107d84f3f99942739aa1b6cfd35ba16abca07bf0c8c57ec3ced2737d"} Oct 13 13:35:00 crc kubenswrapper[4684]: I1013 13:35:00.266998 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tp972" event={"ID":"5ee75bd4-322f-417a-bec1-964d5d164ca1","Type":"ContainerDied","Data":"9b75bb018d4c8090781a76291327d708aa5f7ce7fca4fbd6e6c8db0f40e877bf"} Oct 13 13:35:00 crc kubenswrapper[4684]: I1013 13:35:00.266995 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tp972" Oct 13 13:35:00 crc kubenswrapper[4684]: I1013 13:35:00.267021 4684 scope.go:117] "RemoveContainer" containerID="966dffaa107d84f3f99942739aa1b6cfd35ba16abca07bf0c8c57ec3ced2737d" Oct 13 13:35:00 crc kubenswrapper[4684]: I1013 13:35:00.285238 4684 scope.go:117] "RemoveContainer" containerID="f571a867c98432b91606f9e611f3b672846b1bf0300016e9f24a51d42f41f7d8" Oct 13 13:35:00 crc kubenswrapper[4684]: I1013 13:35:00.306982 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tp972"] Oct 13 13:35:00 crc kubenswrapper[4684]: I1013 13:35:00.314169 4684 scope.go:117] "RemoveContainer" containerID="125ab5acec4e3122474d78518d7e0250c2f060cd1cab18149abca4c47169f2b5" Oct 13 13:35:00 crc kubenswrapper[4684]: I1013 13:35:00.315081 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tp972"] Oct 13 13:35:00 crc kubenswrapper[4684]: I1013 13:35:00.356362 4684 scope.go:117] "RemoveContainer" containerID="966dffaa107d84f3f99942739aa1b6cfd35ba16abca07bf0c8c57ec3ced2737d" Oct 13 13:35:00 crc kubenswrapper[4684]: E1013 13:35:00.356684 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"966dffaa107d84f3f99942739aa1b6cfd35ba16abca07bf0c8c57ec3ced2737d\": container with ID starting with 966dffaa107d84f3f99942739aa1b6cfd35ba16abca07bf0c8c57ec3ced2737d not found: ID does not exist" containerID="966dffaa107d84f3f99942739aa1b6cfd35ba16abca07bf0c8c57ec3ced2737d" Oct 13 13:35:00 crc kubenswrapper[4684]: I1013 13:35:00.356712 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"966dffaa107d84f3f99942739aa1b6cfd35ba16abca07bf0c8c57ec3ced2737d"} err="failed to get container status \"966dffaa107d84f3f99942739aa1b6cfd35ba16abca07bf0c8c57ec3ced2737d\": rpc error: code = NotFound desc = could not find container \"966dffaa107d84f3f99942739aa1b6cfd35ba16abca07bf0c8c57ec3ced2737d\": container with ID starting with 966dffaa107d84f3f99942739aa1b6cfd35ba16abca07bf0c8c57ec3ced2737d not found: ID does not exist" Oct 13 13:35:00 crc kubenswrapper[4684]: I1013 13:35:00.356731 4684 scope.go:117] "RemoveContainer" containerID="f571a867c98432b91606f9e611f3b672846b1bf0300016e9f24a51d42f41f7d8" Oct 13 13:35:00 crc kubenswrapper[4684]: E1013 13:35:00.357128 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f571a867c98432b91606f9e611f3b672846b1bf0300016e9f24a51d42f41f7d8\": container with ID starting with f571a867c98432b91606f9e611f3b672846b1bf0300016e9f24a51d42f41f7d8 not found: ID does not exist" containerID="f571a867c98432b91606f9e611f3b672846b1bf0300016e9f24a51d42f41f7d8" Oct 13 13:35:00 crc kubenswrapper[4684]: I1013 13:35:00.357156 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f571a867c98432b91606f9e611f3b672846b1bf0300016e9f24a51d42f41f7d8"} err="failed to get container status \"f571a867c98432b91606f9e611f3b672846b1bf0300016e9f24a51d42f41f7d8\": rpc error: code = NotFound desc = could not find container \"f571a867c98432b91606f9e611f3b672846b1bf0300016e9f24a51d42f41f7d8\": container with ID starting with f571a867c98432b91606f9e611f3b672846b1bf0300016e9f24a51d42f41f7d8 not found: ID does not exist" Oct 13 13:35:00 crc kubenswrapper[4684]: I1013 13:35:00.357172 4684 scope.go:117] "RemoveContainer" 
containerID="125ab5acec4e3122474d78518d7e0250c2f060cd1cab18149abca4c47169f2b5" Oct 13 13:35:00 crc kubenswrapper[4684]: E1013 13:35:00.357395 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"125ab5acec4e3122474d78518d7e0250c2f060cd1cab18149abca4c47169f2b5\": container with ID starting with 125ab5acec4e3122474d78518d7e0250c2f060cd1cab18149abca4c47169f2b5 not found: ID does not exist" containerID="125ab5acec4e3122474d78518d7e0250c2f060cd1cab18149abca4c47169f2b5" Oct 13 13:35:00 crc kubenswrapper[4684]: I1013 13:35:00.357416 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"125ab5acec4e3122474d78518d7e0250c2f060cd1cab18149abca4c47169f2b5"} err="failed to get container status \"125ab5acec4e3122474d78518d7e0250c2f060cd1cab18149abca4c47169f2b5\": rpc error: code = NotFound desc = could not find container \"125ab5acec4e3122474d78518d7e0250c2f060cd1cab18149abca4c47169f2b5\": container with ID starting with 125ab5acec4e3122474d78518d7e0250c2f060cd1cab18149abca4c47169f2b5 not found: ID does not exist" Oct 13 13:35:00 crc kubenswrapper[4684]: I1013 13:35:00.366513 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ee75bd4-322f-417a-bec1-964d5d164ca1" path="/var/lib/kubelet/pods/5ee75bd4-322f-417a-bec1-964d5d164ca1/volumes" Oct 13 13:35:01 crc kubenswrapper[4684]: I1013 13:35:01.350720 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:35:01 crc kubenswrapper[4684]: E1013 13:35:01.351132 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:35:06 crc kubenswrapper[4684]: I1013 13:35:06.037586 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xq69h"] Oct 13 13:35:06 crc kubenswrapper[4684]: I1013 13:35:06.046040 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-xq69h"] Oct 13 13:35:06 crc kubenswrapper[4684]: I1013 13:35:06.360573 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a57c6496-4904-4c3d-b12a-7f06e4f305d3" path="/var/lib/kubelet/pods/a57c6496-4904-4c3d-b12a-7f06e4f305d3/volumes" Oct 13 13:35:16 crc kubenswrapper[4684]: I1013 13:35:16.360624 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:35:16 crc kubenswrapper[4684]: E1013 13:35:16.361398 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:35:27 crc kubenswrapper[4684]: I1013 13:35:27.350781 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:35:27 crc kubenswrapper[4684]: E1013 
13:35:27.351611 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:35:28 crc kubenswrapper[4684]: I1013 13:35:28.045036 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-psw2f"] Oct 13 13:35:28 crc kubenswrapper[4684]: I1013 13:35:28.051501 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-psw2f"] Oct 13 13:35:28 crc kubenswrapper[4684]: I1013 13:35:28.362252 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a538cd5e-c95c-430e-be87-cdc9256cb876" path="/var/lib/kubelet/pods/a538cd5e-c95c-430e-be87-cdc9256cb876/volumes" Oct 13 13:35:31 crc kubenswrapper[4684]: I1013 13:35:31.054576 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xjt66"] Oct 13 13:35:31 crc kubenswrapper[4684]: I1013 13:35:31.061523 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xjt66"] Oct 13 13:35:32 crc kubenswrapper[4684]: I1013 13:35:32.364565 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5097f057-d2b4-41ba-bf72-d31f0d346d06" path="/var/lib/kubelet/pods/5097f057-d2b4-41ba-bf72-d31f0d346d06/volumes" Oct 13 13:35:38 crc kubenswrapper[4684]: I1013 13:35:38.351697 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:35:38 crc kubenswrapper[4684]: E1013 13:35:38.352508 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:35:38 crc kubenswrapper[4684]: I1013 13:35:38.615972 4684 generic.go:334] "Generic (PLEG): container finished" podID="aabcf2cf-0d17-4864-a8f6-55220ed4c45c" containerID="27f1db3d026fa1295d1d32fd326ef3edd75bbce0aeb628ed14d1cfd49164677c" exitCode=0 Oct 13 13:35:38 crc kubenswrapper[4684]: I1013 13:35:38.616099 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" event={"ID":"aabcf2cf-0d17-4864-a8f6-55220ed4c45c","Type":"ContainerDied","Data":"27f1db3d026fa1295d1d32fd326ef3edd75bbce0aeb628ed14d1cfd49164677c"} Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.009632 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.201974 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-ssh-key\") pod \"aabcf2cf-0d17-4864-a8f6-55220ed4c45c\" (UID: \"aabcf2cf-0d17-4864-a8f6-55220ed4c45c\") " Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.202100 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bqq5\" (UniqueName: \"kubernetes.io/projected/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-kube-api-access-2bqq5\") pod \"aabcf2cf-0d17-4864-a8f6-55220ed4c45c\" (UID: \"aabcf2cf-0d17-4864-a8f6-55220ed4c45c\") " Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.202177 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-inventory\") pod \"aabcf2cf-0d17-4864-a8f6-55220ed4c45c\" (UID: \"aabcf2cf-0d17-4864-a8f6-55220ed4c45c\") " Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.209837 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-kube-api-access-2bqq5" (OuterVolumeSpecName: "kube-api-access-2bqq5") pod "aabcf2cf-0d17-4864-a8f6-55220ed4c45c" (UID: "aabcf2cf-0d17-4864-a8f6-55220ed4c45c"). InnerVolumeSpecName "kube-api-access-2bqq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.236930 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "aabcf2cf-0d17-4864-a8f6-55220ed4c45c" (UID: "aabcf2cf-0d17-4864-a8f6-55220ed4c45c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.238406 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-inventory" (OuterVolumeSpecName: "inventory") pod "aabcf2cf-0d17-4864-a8f6-55220ed4c45c" (UID: "aabcf2cf-0d17-4864-a8f6-55220ed4c45c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.305105 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.305137 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bqq5\" (UniqueName: \"kubernetes.io/projected/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-kube-api-access-2bqq5\") on node \"crc\" DevicePath \"\"" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.305150 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aabcf2cf-0d17-4864-a8f6-55220ed4c45c-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.637001 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" event={"ID":"aabcf2cf-0d17-4864-a8f6-55220ed4c45c","Type":"ContainerDied","Data":"cb66d18bca9b816ef3ba11cfde381a41492a6cb2f4d57be4797c7e864c544ec7"} Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.637046 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb66d18bca9b816ef3ba11cfde381a41492a6cb2f4d57be4797c7e864c544ec7" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.637051 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-kx22x" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.718759 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg"] Oct 13 13:35:40 crc kubenswrapper[4684]: E1013 13:35:40.719321 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ee75bd4-322f-417a-bec1-964d5d164ca1" containerName="extract-content" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.719350 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ee75bd4-322f-417a-bec1-964d5d164ca1" containerName="extract-content" Oct 13 13:35:40 crc kubenswrapper[4684]: E1013 13:35:40.719374 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aabcf2cf-0d17-4864-a8f6-55220ed4c45c" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.719385 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="aabcf2cf-0d17-4864-a8f6-55220ed4c45c" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 13 13:35:40 crc kubenswrapper[4684]: E1013 13:35:40.719402 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ee75bd4-322f-417a-bec1-964d5d164ca1" containerName="extract-utilities" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.719410 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ee75bd4-322f-417a-bec1-964d5d164ca1" containerName="extract-utilities" Oct 13 13:35:40 crc kubenswrapper[4684]: E1013 13:35:40.719430 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ee75bd4-322f-417a-bec1-964d5d164ca1" containerName="registry-server" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.719438 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ee75bd4-322f-417a-bec1-964d5d164ca1" containerName="registry-server" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.719678 4684 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="aabcf2cf-0d17-4864-a8f6-55220ed4c45c" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.719733 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ee75bd4-322f-417a-bec1-964d5d164ca1" containerName="registry-server" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.720622 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.725349 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.725435 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.725623 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.726013 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.733628 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg"] Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.917861 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29952780-72b6-4f29-9d43-06e33d6dd41a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg\" (UID: \"29952780-72b6-4f29-9d43-06e33d6dd41a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.918168 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29952780-72b6-4f29-9d43-06e33d6dd41a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg\" (UID: \"29952780-72b6-4f29-9d43-06e33d6dd41a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" Oct 13 13:35:40 crc kubenswrapper[4684]: I1013 13:35:40.918248 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8nmn\" (UniqueName: \"kubernetes.io/projected/29952780-72b6-4f29-9d43-06e33d6dd41a-kube-api-access-g8nmn\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg\" (UID: \"29952780-72b6-4f29-9d43-06e33d6dd41a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" Oct 13 13:35:41 crc kubenswrapper[4684]: I1013 13:35:41.020513 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29952780-72b6-4f29-9d43-06e33d6dd41a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg\" (UID: \"29952780-72b6-4f29-9d43-06e33d6dd41a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" Oct 13 13:35:41 crc kubenswrapper[4684]: I1013 13:35:41.020625 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8nmn\" (UniqueName: \"kubernetes.io/projected/29952780-72b6-4f29-9d43-06e33d6dd41a-kube-api-access-g8nmn\") pod 
\"configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg\" (UID: \"29952780-72b6-4f29-9d43-06e33d6dd41a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" Oct 13 13:35:41 crc kubenswrapper[4684]: I1013 13:35:41.020772 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29952780-72b6-4f29-9d43-06e33d6dd41a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg\" (UID: \"29952780-72b6-4f29-9d43-06e33d6dd41a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" Oct 13 13:35:41 crc kubenswrapper[4684]: I1013 13:35:41.025373 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29952780-72b6-4f29-9d43-06e33d6dd41a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg\" (UID: \"29952780-72b6-4f29-9d43-06e33d6dd41a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" Oct 13 13:35:41 crc kubenswrapper[4684]: I1013 13:35:41.025628 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29952780-72b6-4f29-9d43-06e33d6dd41a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg\" (UID: \"29952780-72b6-4f29-9d43-06e33d6dd41a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" Oct 13 13:35:41 crc kubenswrapper[4684]: I1013 13:35:41.051798 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8nmn\" (UniqueName: \"kubernetes.io/projected/29952780-72b6-4f29-9d43-06e33d6dd41a-kube-api-access-g8nmn\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg\" (UID: \"29952780-72b6-4f29-9d43-06e33d6dd41a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" Oct 13 13:35:41 crc kubenswrapper[4684]: I1013 13:35:41.344330 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" Oct 13 13:35:42 crc kubenswrapper[4684]: I1013 13:35:42.052096 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg"] Oct 13 13:35:42 crc kubenswrapper[4684]: W1013 13:35:42.061852 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod29952780_72b6_4f29_9d43_06e33d6dd41a.slice/crio-2b8c98fe65ece71adf80cfb85e39e3ea0077f6ada39abd489150b947bf296337 WatchSource:0}: Error finding container 2b8c98fe65ece71adf80cfb85e39e3ea0077f6ada39abd489150b947bf296337: Status 404 returned error can't find the container with id 2b8c98fe65ece71adf80cfb85e39e3ea0077f6ada39abd489150b947bf296337 Oct 13 13:35:42 crc kubenswrapper[4684]: I1013 13:35:42.658844 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" event={"ID":"29952780-72b6-4f29-9d43-06e33d6dd41a","Type":"ContainerStarted","Data":"2b8c98fe65ece71adf80cfb85e39e3ea0077f6ada39abd489150b947bf296337"} Oct 13 13:35:43 crc kubenswrapper[4684]: I1013 13:35:43.668036 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" event={"ID":"29952780-72b6-4f29-9d43-06e33d6dd41a","Type":"ContainerStarted","Data":"8d88c2521d551db1e5ed5292cb5c81946e77d5d6dcf4a62d48a26bd7b6655453"} Oct 13 13:35:43 crc kubenswrapper[4684]: I1013 13:35:43.689713 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" podStartSLOduration=3.152293437 podStartE2EDuration="3.689693261s" podCreationTimestamp="2025-10-13 13:35:40 +0000 UTC" firstStartedPulling="2025-10-13 13:35:42.063282084 +0000 UTC m=+1696.630666144" lastFinishedPulling="2025-10-13 13:35:42.600681898 +0000 UTC m=+1697.168065968" observedRunningTime="2025-10-13 13:35:43.684200768 +0000 UTC m=+1698.251584838" watchObservedRunningTime="2025-10-13 13:35:43.689693261 +0000 UTC m=+1698.257077331" Oct 13 13:35:49 crc kubenswrapper[4684]: I1013 13:35:49.880047 4684 scope.go:117] "RemoveContainer" containerID="b4388afd3ba22307e08de0aa8ce2372a4ef76cfaf391bdca7df3313b56501a55" Oct 13 13:35:49 crc kubenswrapper[4684]: I1013 13:35:49.928681 4684 scope.go:117] "RemoveContainer" containerID="d8027453ae279cbf845f8b00002d138204406f067600c4a5b58f53cc193a9b65" Oct 13 13:35:49 crc kubenswrapper[4684]: I1013 13:35:49.978204 4684 scope.go:117] "RemoveContainer" containerID="7a7167e6efc1451b903956e96cbf042adc479fe0ba70ea396956093709320553" Oct 13 13:35:50 crc kubenswrapper[4684]: I1013 13:35:50.351167 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:35:50 crc kubenswrapper[4684]: E1013 13:35:50.351597 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:36:04 crc kubenswrapper[4684]: I1013 13:36:04.351229 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 
13 13:36:04 crc kubenswrapper[4684]: E1013 13:36:04.352018 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:36:14 crc kubenswrapper[4684]: I1013 13:36:14.044557 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-dz77b"] Oct 13 13:36:14 crc kubenswrapper[4684]: I1013 13:36:14.052255 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-dz77b"] Oct 13 13:36:14 crc kubenswrapper[4684]: I1013 13:36:14.367462 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d7ae4ab-21ea-43b7-b33b-0abe75b043dc" path="/var/lib/kubelet/pods/2d7ae4ab-21ea-43b7-b33b-0abe75b043dc/volumes" Oct 13 13:36:18 crc kubenswrapper[4684]: I1013 13:36:18.350991 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:36:18 crc kubenswrapper[4684]: E1013 13:36:18.351604 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:36:31 crc kubenswrapper[4684]: I1013 13:36:31.350691 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:36:31 crc kubenswrapper[4684]: E1013 13:36:31.351491 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:36:39 crc kubenswrapper[4684]: I1013 13:36:39.214101 4684 generic.go:334] "Generic (PLEG): container finished" podID="29952780-72b6-4f29-9d43-06e33d6dd41a" containerID="8d88c2521d551db1e5ed5292cb5c81946e77d5d6dcf4a62d48a26bd7b6655453" exitCode=2 Oct 13 13:36:39 crc kubenswrapper[4684]: I1013 13:36:39.214205 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" event={"ID":"29952780-72b6-4f29-9d43-06e33d6dd41a","Type":"ContainerDied","Data":"8d88c2521d551db1e5ed5292cb5c81946e77d5d6dcf4a62d48a26bd7b6655453"} Oct 13 13:36:40 crc kubenswrapper[4684]: I1013 13:36:40.696078 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" Oct 13 13:36:40 crc kubenswrapper[4684]: I1013 13:36:40.856642 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29952780-72b6-4f29-9d43-06e33d6dd41a-inventory\") pod \"29952780-72b6-4f29-9d43-06e33d6dd41a\" (UID: \"29952780-72b6-4f29-9d43-06e33d6dd41a\") " Oct 13 13:36:40 crc kubenswrapper[4684]: I1013 13:36:40.857014 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29952780-72b6-4f29-9d43-06e33d6dd41a-ssh-key\") pod \"29952780-72b6-4f29-9d43-06e33d6dd41a\" (UID: \"29952780-72b6-4f29-9d43-06e33d6dd41a\") " Oct 13 13:36:40 crc kubenswrapper[4684]: I1013 13:36:40.857135 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8nmn\" (UniqueName: \"kubernetes.io/projected/29952780-72b6-4f29-9d43-06e33d6dd41a-kube-api-access-g8nmn\") pod \"29952780-72b6-4f29-9d43-06e33d6dd41a\" (UID: \"29952780-72b6-4f29-9d43-06e33d6dd41a\") " Oct 13 13:36:40 crc kubenswrapper[4684]: I1013 13:36:40.863975 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29952780-72b6-4f29-9d43-06e33d6dd41a-kube-api-access-g8nmn" (OuterVolumeSpecName: "kube-api-access-g8nmn") pod "29952780-72b6-4f29-9d43-06e33d6dd41a" (UID: "29952780-72b6-4f29-9d43-06e33d6dd41a"). InnerVolumeSpecName "kube-api-access-g8nmn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:36:40 crc kubenswrapper[4684]: I1013 13:36:40.890043 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29952780-72b6-4f29-9d43-06e33d6dd41a-inventory" (OuterVolumeSpecName: "inventory") pod "29952780-72b6-4f29-9d43-06e33d6dd41a" (UID: "29952780-72b6-4f29-9d43-06e33d6dd41a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:36:40 crc kubenswrapper[4684]: I1013 13:36:40.912135 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29952780-72b6-4f29-9d43-06e33d6dd41a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "29952780-72b6-4f29-9d43-06e33d6dd41a" (UID: "29952780-72b6-4f29-9d43-06e33d6dd41a"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:36:40 crc kubenswrapper[4684]: I1013 13:36:40.959450 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29952780-72b6-4f29-9d43-06e33d6dd41a-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:36:40 crc kubenswrapper[4684]: I1013 13:36:40.959481 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8nmn\" (UniqueName: \"kubernetes.io/projected/29952780-72b6-4f29-9d43-06e33d6dd41a-kube-api-access-g8nmn\") on node \"crc\" DevicePath \"\"" Oct 13 13:36:40 crc kubenswrapper[4684]: I1013 13:36:40.959491 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29952780-72b6-4f29-9d43-06e33d6dd41a-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:36:41 crc kubenswrapper[4684]: I1013 13:36:41.236565 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" event={"ID":"29952780-72b6-4f29-9d43-06e33d6dd41a","Type":"ContainerDied","Data":"2b8c98fe65ece71adf80cfb85e39e3ea0077f6ada39abd489150b947bf296337"} Oct 13 13:36:41 crc kubenswrapper[4684]: I1013 13:36:41.236612 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b8c98fe65ece71adf80cfb85e39e3ea0077f6ada39abd489150b947bf296337" Oct 13 13:36:41 crc kubenswrapper[4684]: I1013 13:36:41.236635 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg" Oct 13 13:36:46 crc kubenswrapper[4684]: I1013 13:36:46.363488 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:36:46 crc kubenswrapper[4684]: E1013 13:36:46.364458 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.026139 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f"] Oct 13 13:36:48 crc kubenswrapper[4684]: E1013 13:36:48.026739 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29952780-72b6-4f29-9d43-06e33d6dd41a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.026753 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="29952780-72b6-4f29-9d43-06e33d6dd41a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.026947 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="29952780-72b6-4f29-9d43-06e33d6dd41a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.027546 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.029821 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.029997 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.030162 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.030447 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.046582 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f"] Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.195353 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w56ss\" (UniqueName: \"kubernetes.io/projected/0b60be99-cf7b-40e7-8c3b-539d082dd005-kube-api-access-w56ss\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f\" (UID: \"0b60be99-cf7b-40e7-8c3b-539d082dd005\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.195450 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b60be99-cf7b-40e7-8c3b-539d082dd005-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f\" (UID: \"0b60be99-cf7b-40e7-8c3b-539d082dd005\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.195545 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b60be99-cf7b-40e7-8c3b-539d082dd005-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f\" (UID: \"0b60be99-cf7b-40e7-8c3b-539d082dd005\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.297341 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b60be99-cf7b-40e7-8c3b-539d082dd005-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f\" (UID: \"0b60be99-cf7b-40e7-8c3b-539d082dd005\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.297674 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b60be99-cf7b-40e7-8c3b-539d082dd005-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f\" (UID: \"0b60be99-cf7b-40e7-8c3b-539d082dd005\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.297752 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w56ss\" (UniqueName: \"kubernetes.io/projected/0b60be99-cf7b-40e7-8c3b-539d082dd005-kube-api-access-w56ss\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f\" 
(UID: \"0b60be99-cf7b-40e7-8c3b-539d082dd005\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.302718 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b60be99-cf7b-40e7-8c3b-539d082dd005-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f\" (UID: \"0b60be99-cf7b-40e7-8c3b-539d082dd005\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.303343 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b60be99-cf7b-40e7-8c3b-539d082dd005-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f\" (UID: \"0b60be99-cf7b-40e7-8c3b-539d082dd005\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.334372 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w56ss\" (UniqueName: \"kubernetes.io/projected/0b60be99-cf7b-40e7-8c3b-539d082dd005-kube-api-access-w56ss\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f\" (UID: \"0b60be99-cf7b-40e7-8c3b-539d082dd005\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.363534 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" Oct 13 13:36:48 crc kubenswrapper[4684]: I1013 13:36:48.941670 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f"] Oct 13 13:36:49 crc kubenswrapper[4684]: I1013 13:36:49.321710 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" event={"ID":"0b60be99-cf7b-40e7-8c3b-539d082dd005","Type":"ContainerStarted","Data":"9353de3f3629d998679435f3ba01fe321cd2a736ed036efc0af6aa2202d76e32"} Oct 13 13:36:50 crc kubenswrapper[4684]: I1013 13:36:50.110802 4684 scope.go:117] "RemoveContainer" containerID="7c8eb225eb18436868922853baa2ded66725adcc42a358daed53643bae5db44b" Oct 13 13:36:50 crc kubenswrapper[4684]: I1013 13:36:50.331394 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" event={"ID":"0b60be99-cf7b-40e7-8c3b-539d082dd005","Type":"ContainerStarted","Data":"31495fef3a2f7dad68cdbd09d988299f13b7afe188940a349b94f144d1e5b1b1"} Oct 13 13:36:57 crc kubenswrapper[4684]: I1013 13:36:57.350418 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:36:57 crc kubenswrapper[4684]: E1013 13:36:57.351369 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:37:09 crc kubenswrapper[4684]: I1013 13:37:09.351421 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:37:09 crc 
kubenswrapper[4684]: E1013 13:37:09.352252 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:37:24 crc kubenswrapper[4684]: I1013 13:37:24.350627 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:37:24 crc kubenswrapper[4684]: E1013 13:37:24.351731 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:37:38 crc kubenswrapper[4684]: I1013 13:37:38.351626 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:37:38 crc kubenswrapper[4684]: I1013 13:37:38.867234 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"ede402027974274ba6fe309180fbdcdddd9fb465f99a94370fe07cbd3a326e81"} Oct 13 13:37:38 crc kubenswrapper[4684]: I1013 13:37:38.868869 4684 generic.go:334] "Generic (PLEG): container finished" podID="0b60be99-cf7b-40e7-8c3b-539d082dd005" containerID="31495fef3a2f7dad68cdbd09d988299f13b7afe188940a349b94f144d1e5b1b1" exitCode=0 Oct 13 13:37:38 crc kubenswrapper[4684]: I1013 13:37:38.868924 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" event={"ID":"0b60be99-cf7b-40e7-8c3b-539d082dd005","Type":"ContainerDied","Data":"31495fef3a2f7dad68cdbd09d988299f13b7afe188940a349b94f144d1e5b1b1"} Oct 13 13:37:38 crc kubenswrapper[4684]: I1013 13:37:38.885842 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" podStartSLOduration=50.36312596 podStartE2EDuration="50.885821232s" podCreationTimestamp="2025-10-13 13:36:48 +0000 UTC" firstStartedPulling="2025-10-13 13:36:48.948728185 +0000 UTC m=+1763.516112265" lastFinishedPulling="2025-10-13 13:36:49.471423437 +0000 UTC m=+1764.038807537" observedRunningTime="2025-10-13 13:36:50.354706623 +0000 UTC m=+1764.922090693" watchObservedRunningTime="2025-10-13 13:37:38.885821232 +0000 UTC m=+1813.453205302" Oct 13 13:37:40 crc kubenswrapper[4684]: I1013 13:37:40.285732 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" Oct 13 13:37:40 crc kubenswrapper[4684]: I1013 13:37:40.394382 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b60be99-cf7b-40e7-8c3b-539d082dd005-inventory\") pod \"0b60be99-cf7b-40e7-8c3b-539d082dd005\" (UID: \"0b60be99-cf7b-40e7-8c3b-539d082dd005\") " Oct 13 13:37:40 crc kubenswrapper[4684]: I1013 13:37:40.394623 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w56ss\" (UniqueName: \"kubernetes.io/projected/0b60be99-cf7b-40e7-8c3b-539d082dd005-kube-api-access-w56ss\") pod \"0b60be99-cf7b-40e7-8c3b-539d082dd005\" (UID: \"0b60be99-cf7b-40e7-8c3b-539d082dd005\") " Oct 13 13:37:40 crc kubenswrapper[4684]: I1013 13:37:40.394651 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b60be99-cf7b-40e7-8c3b-539d082dd005-ssh-key\") pod \"0b60be99-cf7b-40e7-8c3b-539d082dd005\" (UID: \"0b60be99-cf7b-40e7-8c3b-539d082dd005\") " Oct 13 13:37:40 crc kubenswrapper[4684]: I1013 13:37:40.400097 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b60be99-cf7b-40e7-8c3b-539d082dd005-kube-api-access-w56ss" (OuterVolumeSpecName: "kube-api-access-w56ss") pod "0b60be99-cf7b-40e7-8c3b-539d082dd005" (UID: "0b60be99-cf7b-40e7-8c3b-539d082dd005"). InnerVolumeSpecName "kube-api-access-w56ss". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:37:40 crc kubenswrapper[4684]: I1013 13:37:40.422605 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b60be99-cf7b-40e7-8c3b-539d082dd005-inventory" (OuterVolumeSpecName: "inventory") pod "0b60be99-cf7b-40e7-8c3b-539d082dd005" (UID: "0b60be99-cf7b-40e7-8c3b-539d082dd005"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:37:40 crc kubenswrapper[4684]: I1013 13:37:40.422805 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b60be99-cf7b-40e7-8c3b-539d082dd005-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0b60be99-cf7b-40e7-8c3b-539d082dd005" (UID: "0b60be99-cf7b-40e7-8c3b-539d082dd005"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:37:40 crc kubenswrapper[4684]: I1013 13:37:40.497783 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b60be99-cf7b-40e7-8c3b-539d082dd005-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:37:40 crc kubenswrapper[4684]: I1013 13:37:40.497826 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w56ss\" (UniqueName: \"kubernetes.io/projected/0b60be99-cf7b-40e7-8c3b-539d082dd005-kube-api-access-w56ss\") on node \"crc\" DevicePath \"\"" Oct 13 13:37:40 crc kubenswrapper[4684]: I1013 13:37:40.497842 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b60be99-cf7b-40e7-8c3b-539d082dd005-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:37:40 crc kubenswrapper[4684]: I1013 13:37:40.888430 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" event={"ID":"0b60be99-cf7b-40e7-8c3b-539d082dd005","Type":"ContainerDied","Data":"9353de3f3629d998679435f3ba01fe321cd2a736ed036efc0af6aa2202d76e32"} Oct 13 13:37:40 crc kubenswrapper[4684]: I1013 13:37:40.888677 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9353de3f3629d998679435f3ba01fe321cd2a736ed036efc0af6aa2202d76e32" Oct 13 13:37:40 crc kubenswrapper[4684]: I1013 13:37:40.888545 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.008137 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-l9jlr"] Oct 13 13:37:41 crc kubenswrapper[4684]: E1013 13:37:41.008519 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b60be99-cf7b-40e7-8c3b-539d082dd005" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.008538 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b60be99-cf7b-40e7-8c3b-539d082dd005" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.008724 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b60be99-cf7b-40e7-8c3b-539d082dd005" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.009345 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.015454 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.015614 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.015669 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.015678 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.026717 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-l9jlr"] Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.109253 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvppd\" (UniqueName: \"kubernetes.io/projected/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-kube-api-access-kvppd\") pod \"ssh-known-hosts-edpm-deployment-l9jlr\" (UID: \"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11\") " pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.109334 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-l9jlr\" (UID: \"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11\") " pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.109371 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-l9jlr\" (UID: \"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11\") " pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.211664 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvppd\" (UniqueName: \"kubernetes.io/projected/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-kube-api-access-kvppd\") pod \"ssh-known-hosts-edpm-deployment-l9jlr\" (UID: \"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11\") " pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.211763 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-l9jlr\" (UID: \"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11\") " pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.211801 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-l9jlr\" (UID: \"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11\") " pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" Oct 13 13:37:41 crc 
kubenswrapper[4684]: I1013 13:37:41.238864 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-l9jlr\" (UID: \"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11\") " pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.239869 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-l9jlr\" (UID: \"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11\") " pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.252002 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvppd\" (UniqueName: \"kubernetes.io/projected/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-kube-api-access-kvppd\") pod \"ssh-known-hosts-edpm-deployment-l9jlr\" (UID: \"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11\") " pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.328338 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" Oct 13 13:37:41 crc kubenswrapper[4684]: I1013 13:37:41.894977 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-l9jlr"] Oct 13 13:37:42 crc kubenswrapper[4684]: I1013 13:37:42.912620 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" event={"ID":"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11","Type":"ContainerStarted","Data":"700804be1a8993710f3f05479c2e8453de96c19cba376bc2400c5c30efa53f3a"} Oct 13 13:37:42 crc kubenswrapper[4684]: I1013 13:37:42.913159 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" event={"ID":"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11","Type":"ContainerStarted","Data":"88f79316e6ee4be5455b9181c3db6f9099d0e2867790fe91ea09429e842f6c0e"} Oct 13 13:37:42 crc kubenswrapper[4684]: I1013 13:37:42.936870 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" podStartSLOduration=2.187658094 podStartE2EDuration="2.936853436s" podCreationTimestamp="2025-10-13 13:37:40 +0000 UTC" firstStartedPulling="2025-10-13 13:37:41.91106934 +0000 UTC m=+1816.478453410" lastFinishedPulling="2025-10-13 13:37:42.660264662 +0000 UTC m=+1817.227648752" observedRunningTime="2025-10-13 13:37:42.932633833 +0000 UTC m=+1817.500018023" watchObservedRunningTime="2025-10-13 13:37:42.936853436 +0000 UTC m=+1817.504237506" Oct 13 13:37:50 crc kubenswrapper[4684]: I1013 13:37:50.993311 4684 generic.go:334] "Generic (PLEG): container finished" podID="1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11" containerID="700804be1a8993710f3f05479c2e8453de96c19cba376bc2400c5c30efa53f3a" exitCode=0 Oct 13 13:37:50 crc kubenswrapper[4684]: I1013 13:37:50.993380 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" event={"ID":"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11","Type":"ContainerDied","Data":"700804be1a8993710f3f05479c2e8453de96c19cba376bc2400c5c30efa53f3a"} Oct 13 13:37:52 crc kubenswrapper[4684]: I1013 13:37:52.436512 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" Oct 13 13:37:52 crc kubenswrapper[4684]: I1013 13:37:52.537940 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-inventory-0\") pod \"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11\" (UID: \"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11\") " Oct 13 13:37:52 crc kubenswrapper[4684]: I1013 13:37:52.538438 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvppd\" (UniqueName: \"kubernetes.io/projected/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-kube-api-access-kvppd\") pod \"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11\" (UID: \"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11\") " Oct 13 13:37:52 crc kubenswrapper[4684]: I1013 13:37:52.538516 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-ssh-key-openstack-edpm-ipam\") pod \"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11\" (UID: \"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11\") " Oct 13 13:37:52 crc kubenswrapper[4684]: I1013 13:37:52.548588 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-kube-api-access-kvppd" (OuterVolumeSpecName: "kube-api-access-kvppd") pod "1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11" (UID: "1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11"). InnerVolumeSpecName "kube-api-access-kvppd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:37:52 crc kubenswrapper[4684]: I1013 13:37:52.566616 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11" (UID: "1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:37:52 crc kubenswrapper[4684]: I1013 13:37:52.591765 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11" (UID: "1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:37:52 crc kubenswrapper[4684]: I1013 13:37:52.641403 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvppd\" (UniqueName: \"kubernetes.io/projected/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-kube-api-access-kvppd\") on node \"crc\" DevicePath \"\"" Oct 13 13:37:52 crc kubenswrapper[4684]: I1013 13:37:52.641472 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 13 13:37:52 crc kubenswrapper[4684]: I1013 13:37:52.641489 4684 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.018344 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" event={"ID":"1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11","Type":"ContainerDied","Data":"88f79316e6ee4be5455b9181c3db6f9099d0e2867790fe91ea09429e842f6c0e"} Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.018385 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88f79316e6ee4be5455b9181c3db6f9099d0e2867790fe91ea09429e842f6c0e" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.018405 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-l9jlr" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.109363 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n"] Oct 13 13:37:53 crc kubenswrapper[4684]: E1013 13:37:53.109973 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11" containerName="ssh-known-hosts-edpm-deployment" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.110002 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11" containerName="ssh-known-hosts-edpm-deployment" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.110296 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11" containerName="ssh-known-hosts-edpm-deployment" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.111319 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.116821 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.116970 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.117228 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.117429 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.124983 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n"] Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.155241 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a0d23bf-ab14-453f-b23c-eebd64623b73-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8hz7n\" (UID: \"6a0d23bf-ab14-453f-b23c-eebd64623b73\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.155326 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a0d23bf-ab14-453f-b23c-eebd64623b73-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8hz7n\" (UID: \"6a0d23bf-ab14-453f-b23c-eebd64623b73\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.155383 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swfgf\" (UniqueName: \"kubernetes.io/projected/6a0d23bf-ab14-453f-b23c-eebd64623b73-kube-api-access-swfgf\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8hz7n\" (UID: \"6a0d23bf-ab14-453f-b23c-eebd64623b73\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.256672 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a0d23bf-ab14-453f-b23c-eebd64623b73-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8hz7n\" (UID: \"6a0d23bf-ab14-453f-b23c-eebd64623b73\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.256734 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a0d23bf-ab14-453f-b23c-eebd64623b73-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8hz7n\" (UID: \"6a0d23bf-ab14-453f-b23c-eebd64623b73\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.256793 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swfgf\" (UniqueName: \"kubernetes.io/projected/6a0d23bf-ab14-453f-b23c-eebd64623b73-kube-api-access-swfgf\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8hz7n\" (UID: \"6a0d23bf-ab14-453f-b23c-eebd64623b73\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.261641 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a0d23bf-ab14-453f-b23c-eebd64623b73-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8hz7n\" (UID: \"6a0d23bf-ab14-453f-b23c-eebd64623b73\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.262845 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a0d23bf-ab14-453f-b23c-eebd64623b73-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8hz7n\" (UID: \"6a0d23bf-ab14-453f-b23c-eebd64623b73\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.278888 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swfgf\" (UniqueName: \"kubernetes.io/projected/6a0d23bf-ab14-453f-b23c-eebd64623b73-kube-api-access-swfgf\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8hz7n\" (UID: \"6a0d23bf-ab14-453f-b23c-eebd64623b73\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.432204 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" Oct 13 13:37:53 crc kubenswrapper[4684]: I1013 13:37:53.790250 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n"] Oct 13 13:37:53 crc kubenswrapper[4684]: W1013 13:37:53.797713 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a0d23bf_ab14_453f_b23c_eebd64623b73.slice/crio-d961caae2d1a8cf6ab1a6ca8662bbcd859b5fc74d37ac19d08ee771befc5fb95 WatchSource:0}: Error finding container d961caae2d1a8cf6ab1a6ca8662bbcd859b5fc74d37ac19d08ee771befc5fb95: Status 404 returned error can't find the container with id d961caae2d1a8cf6ab1a6ca8662bbcd859b5fc74d37ac19d08ee771befc5fb95 Oct 13 13:37:54 crc kubenswrapper[4684]: I1013 13:37:54.029331 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" event={"ID":"6a0d23bf-ab14-453f-b23c-eebd64623b73","Type":"ContainerStarted","Data":"d961caae2d1a8cf6ab1a6ca8662bbcd859b5fc74d37ac19d08ee771befc5fb95"} Oct 13 13:37:57 crc kubenswrapper[4684]: I1013 13:37:57.057610 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" event={"ID":"6a0d23bf-ab14-453f-b23c-eebd64623b73","Type":"ContainerStarted","Data":"c36821fef46e3f98917fa3e60b896bc036d4b3819ed26c4198e5ffce0d3953e3"} Oct 13 13:37:57 crc kubenswrapper[4684]: I1013 13:37:57.084485 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" podStartSLOduration=1.761125829 podStartE2EDuration="4.084454743s" podCreationTimestamp="2025-10-13 13:37:53 +0000 UTC" firstStartedPulling="2025-10-13 13:37:53.801242016 +0000 UTC m=+1828.368626106" lastFinishedPulling="2025-10-13 13:37:56.12457091 +0000 UTC m=+1830.691955020" observedRunningTime="2025-10-13 13:37:57.076469113 +0000 UTC m=+1831.643853253" watchObservedRunningTime="2025-10-13 13:37:57.084454743 +0000 UTC 
m=+1831.651838833" Oct 13 13:38:05 crc kubenswrapper[4684]: I1013 13:38:05.172506 4684 generic.go:334] "Generic (PLEG): container finished" podID="6a0d23bf-ab14-453f-b23c-eebd64623b73" containerID="c36821fef46e3f98917fa3e60b896bc036d4b3819ed26c4198e5ffce0d3953e3" exitCode=0 Oct 13 13:38:05 crc kubenswrapper[4684]: I1013 13:38:05.172596 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" event={"ID":"6a0d23bf-ab14-453f-b23c-eebd64623b73","Type":"ContainerDied","Data":"c36821fef46e3f98917fa3e60b896bc036d4b3819ed26c4198e5ffce0d3953e3"} Oct 13 13:38:06 crc kubenswrapper[4684]: I1013 13:38:06.609255 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" Oct 13 13:38:06 crc kubenswrapper[4684]: I1013 13:38:06.729782 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swfgf\" (UniqueName: \"kubernetes.io/projected/6a0d23bf-ab14-453f-b23c-eebd64623b73-kube-api-access-swfgf\") pod \"6a0d23bf-ab14-453f-b23c-eebd64623b73\" (UID: \"6a0d23bf-ab14-453f-b23c-eebd64623b73\") " Oct 13 13:38:06 crc kubenswrapper[4684]: I1013 13:38:06.729953 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a0d23bf-ab14-453f-b23c-eebd64623b73-inventory\") pod \"6a0d23bf-ab14-453f-b23c-eebd64623b73\" (UID: \"6a0d23bf-ab14-453f-b23c-eebd64623b73\") " Oct 13 13:38:06 crc kubenswrapper[4684]: I1013 13:38:06.730235 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a0d23bf-ab14-453f-b23c-eebd64623b73-ssh-key\") pod \"6a0d23bf-ab14-453f-b23c-eebd64623b73\" (UID: \"6a0d23bf-ab14-453f-b23c-eebd64623b73\") " Oct 13 13:38:06 crc kubenswrapper[4684]: I1013 13:38:06.736252 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a0d23bf-ab14-453f-b23c-eebd64623b73-kube-api-access-swfgf" (OuterVolumeSpecName: "kube-api-access-swfgf") pod "6a0d23bf-ab14-453f-b23c-eebd64623b73" (UID: "6a0d23bf-ab14-453f-b23c-eebd64623b73"). InnerVolumeSpecName "kube-api-access-swfgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:38:06 crc kubenswrapper[4684]: I1013 13:38:06.768253 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a0d23bf-ab14-453f-b23c-eebd64623b73-inventory" (OuterVolumeSpecName: "inventory") pod "6a0d23bf-ab14-453f-b23c-eebd64623b73" (UID: "6a0d23bf-ab14-453f-b23c-eebd64623b73"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:38:06 crc kubenswrapper[4684]: I1013 13:38:06.782607 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a0d23bf-ab14-453f-b23c-eebd64623b73-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6a0d23bf-ab14-453f-b23c-eebd64623b73" (UID: "6a0d23bf-ab14-453f-b23c-eebd64623b73"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:38:06 crc kubenswrapper[4684]: I1013 13:38:06.833186 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swfgf\" (UniqueName: \"kubernetes.io/projected/6a0d23bf-ab14-453f-b23c-eebd64623b73-kube-api-access-swfgf\") on node \"crc\" DevicePath \"\"" Oct 13 13:38:06 crc kubenswrapper[4684]: I1013 13:38:06.833224 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a0d23bf-ab14-453f-b23c-eebd64623b73-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:38:06 crc kubenswrapper[4684]: I1013 13:38:06.833240 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a0d23bf-ab14-453f-b23c-eebd64623b73-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.191320 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" event={"ID":"6a0d23bf-ab14-453f-b23c-eebd64623b73","Type":"ContainerDied","Data":"d961caae2d1a8cf6ab1a6ca8662bbcd859b5fc74d37ac19d08ee771befc5fb95"} Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.191597 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d961caae2d1a8cf6ab1a6ca8662bbcd859b5fc74d37ac19d08ee771befc5fb95" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.191372 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8hz7n" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.272165 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl"] Oct 13 13:38:07 crc kubenswrapper[4684]: E1013 13:38:07.272636 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a0d23bf-ab14-453f-b23c-eebd64623b73" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.272656 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a0d23bf-ab14-453f-b23c-eebd64623b73" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.272890 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a0d23bf-ab14-453f-b23c-eebd64623b73" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.273751 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.277129 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.277677 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.277783 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.283288 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl"] Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.288679 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.343828 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl\" (UID: \"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.343966 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xj4wr\" (UniqueName: \"kubernetes.io/projected/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-kube-api-access-xj4wr\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl\" (UID: \"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.344271 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl\" (UID: \"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.445986 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xj4wr\" (UniqueName: \"kubernetes.io/projected/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-kube-api-access-xj4wr\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl\" (UID: \"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.446335 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl\" (UID: \"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.446498 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl\" (UID: 
\"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.451493 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl\" (UID: \"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.452375 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl\" (UID: \"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.462467 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xj4wr\" (UniqueName: \"kubernetes.io/projected/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-kube-api-access-xj4wr\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl\" (UID: \"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" Oct 13 13:38:07 crc kubenswrapper[4684]: I1013 13:38:07.604804 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" Oct 13 13:38:08 crc kubenswrapper[4684]: I1013 13:38:08.123845 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl"] Oct 13 13:38:08 crc kubenswrapper[4684]: I1013 13:38:08.131193 4684 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 13 13:38:08 crc kubenswrapper[4684]: I1013 13:38:08.202849 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" event={"ID":"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9","Type":"ContainerStarted","Data":"943d1140e4b673e78269fbe05457461790b2570b499c772df0042a81a7238f83"} Oct 13 13:38:09 crc kubenswrapper[4684]: I1013 13:38:09.217354 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" event={"ID":"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9","Type":"ContainerStarted","Data":"849d27cc816f40355ccce4b929bbd6abbbe158d3939b1ceeae6e7a37c5607c99"} Oct 13 13:38:09 crc kubenswrapper[4684]: I1013 13:38:09.246248 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" podStartSLOduration=1.657535233 podStartE2EDuration="2.246222507s" podCreationTimestamp="2025-10-13 13:38:07 +0000 UTC" firstStartedPulling="2025-10-13 13:38:08.130888568 +0000 UTC m=+1842.698272638" lastFinishedPulling="2025-10-13 13:38:08.719575842 +0000 UTC m=+1843.286959912" observedRunningTime="2025-10-13 13:38:09.241061365 +0000 UTC m=+1843.808445455" watchObservedRunningTime="2025-10-13 13:38:09.246222507 +0000 UTC m=+1843.813606577" Oct 13 13:38:19 crc kubenswrapper[4684]: I1013 13:38:19.338667 4684 generic.go:334] "Generic (PLEG): container finished" podID="0a757d2e-3642-4d64-9cf8-e0b29e43bbb9" containerID="849d27cc816f40355ccce4b929bbd6abbbe158d3939b1ceeae6e7a37c5607c99" exitCode=0 Oct 13 13:38:19 crc 
kubenswrapper[4684]: I1013 13:38:19.338808 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" event={"ID":"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9","Type":"ContainerDied","Data":"849d27cc816f40355ccce4b929bbd6abbbe158d3939b1ceeae6e7a37c5607c99"} Oct 13 13:38:20 crc kubenswrapper[4684]: I1013 13:38:20.736283 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" Oct 13 13:38:20 crc kubenswrapper[4684]: I1013 13:38:20.912796 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-ssh-key\") pod \"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9\" (UID: \"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9\") " Oct 13 13:38:20 crc kubenswrapper[4684]: I1013 13:38:20.913205 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xj4wr\" (UniqueName: \"kubernetes.io/projected/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-kube-api-access-xj4wr\") pod \"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9\" (UID: \"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9\") " Oct 13 13:38:20 crc kubenswrapper[4684]: I1013 13:38:20.913284 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-inventory\") pod \"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9\" (UID: \"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9\") " Oct 13 13:38:20 crc kubenswrapper[4684]: I1013 13:38:20.927381 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-kube-api-access-xj4wr" (OuterVolumeSpecName: "kube-api-access-xj4wr") pod "0a757d2e-3642-4d64-9cf8-e0b29e43bbb9" (UID: "0a757d2e-3642-4d64-9cf8-e0b29e43bbb9"). InnerVolumeSpecName "kube-api-access-xj4wr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:38:20 crc kubenswrapper[4684]: I1013 13:38:20.944939 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-inventory" (OuterVolumeSpecName: "inventory") pod "0a757d2e-3642-4d64-9cf8-e0b29e43bbb9" (UID: "0a757d2e-3642-4d64-9cf8-e0b29e43bbb9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:38:20 crc kubenswrapper[4684]: I1013 13:38:20.954357 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0a757d2e-3642-4d64-9cf8-e0b29e43bbb9" (UID: "0a757d2e-3642-4d64-9cf8-e0b29e43bbb9"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.015860 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.016230 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xj4wr\" (UniqueName: \"kubernetes.io/projected/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-kube-api-access-xj4wr\") on node \"crc\" DevicePath \"\"" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.016246 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0a757d2e-3642-4d64-9cf8-e0b29e43bbb9-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.364957 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" event={"ID":"0a757d2e-3642-4d64-9cf8-e0b29e43bbb9","Type":"ContainerDied","Data":"943d1140e4b673e78269fbe05457461790b2570b499c772df0042a81a7238f83"} Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.364998 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="943d1140e4b673e78269fbe05457461790b2570b499c772df0042a81a7238f83" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.365056 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.484861 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t"] Oct 13 13:38:21 crc kubenswrapper[4684]: E1013 13:38:21.485343 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a757d2e-3642-4d64-9cf8-e0b29e43bbb9" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.485362 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a757d2e-3642-4d64-9cf8-e0b29e43bbb9" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.485547 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a757d2e-3642-4d64-9cf8-e0b29e43bbb9" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.486238 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.491447 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.491633 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.491725 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.491778 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.491839 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.491982 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.492161 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.492281 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.500716 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t"] Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.530207 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7622m\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-kube-api-access-7622m\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.530278 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.530311 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.530336 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-neutron-metadata-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.530410 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.530444 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.530477 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.530545 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.530579 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.530684 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.530820 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-bootstrap-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.530920 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.530950 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.531031 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.632062 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.632125 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.632153 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.632196 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: 
I1013 13:38:21.632231 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.632258 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.632277 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.632311 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.632341 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7622m\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-kube-api-access-7622m\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.632360 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.632380 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.632404 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: 
\"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.632434 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.632452 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.637048 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.637248 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.637913 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.638072 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.638274 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.638657 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.639982 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.639757 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.640297 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.640298 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.641704 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.643509 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.643629 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.663443 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7622m\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-kube-api-access-7622m\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hj69t\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:21 crc kubenswrapper[4684]: I1013 13:38:21.847330 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:38:22 crc kubenswrapper[4684]: W1013 13:38:22.370416 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a51a7e3_30bb_4bce_889c_b13d919ef64c.slice/crio-58e298b6c0a3531b8bec2c9b92e37fb84735859d0bd14ddc6c95a1b6254c49af WatchSource:0}: Error finding container 58e298b6c0a3531b8bec2c9b92e37fb84735859d0bd14ddc6c95a1b6254c49af: Status 404 returned error can't find the container with id 58e298b6c0a3531b8bec2c9b92e37fb84735859d0bd14ddc6c95a1b6254c49af Oct 13 13:38:22 crc kubenswrapper[4684]: I1013 13:38:22.394294 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t"] Oct 13 13:38:23 crc kubenswrapper[4684]: I1013 13:38:23.407545 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" event={"ID":"7a51a7e3-30bb-4bce-889c-b13d919ef64c","Type":"ContainerStarted","Data":"1ef93736f471daeef327a7b9cdb582d83e5ea43a048cfa579727ca8dedcdd4bf"} Oct 13 13:38:23 crc kubenswrapper[4684]: I1013 13:38:23.408844 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" event={"ID":"7a51a7e3-30bb-4bce-889c-b13d919ef64c","Type":"ContainerStarted","Data":"58e298b6c0a3531b8bec2c9b92e37fb84735859d0bd14ddc6c95a1b6254c49af"} Oct 13 13:38:23 crc kubenswrapper[4684]: I1013 13:38:23.436830 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" podStartSLOduration=1.903368605 podStartE2EDuration="2.436808463s" podCreationTimestamp="2025-10-13 13:38:21 +0000 UTC" firstStartedPulling="2025-10-13 13:38:22.375078539 +0000 UTC m=+1856.942462609" lastFinishedPulling="2025-10-13 13:38:22.908518397 +0000 UTC m=+1857.475902467" observedRunningTime="2025-10-13 13:38:23.428512413 +0000 UTC m=+1857.995896493" watchObservedRunningTime="2025-10-13 13:38:23.436808463 +0000 UTC m=+1858.004192533" Oct 13 13:39:04 crc kubenswrapper[4684]: I1013 13:39:04.828824 4684 generic.go:334] "Generic (PLEG): container finished" podID="7a51a7e3-30bb-4bce-889c-b13d919ef64c" containerID="1ef93736f471daeef327a7b9cdb582d83e5ea43a048cfa579727ca8dedcdd4bf" exitCode=0 Oct 13 13:39:04 crc kubenswrapper[4684]: I1013 13:39:04.828883 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" event={"ID":"7a51a7e3-30bb-4bce-889c-b13d919ef64c","Type":"ContainerDied","Data":"1ef93736f471daeef327a7b9cdb582d83e5ea43a048cfa579727ca8dedcdd4bf"} Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.282031 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.473951 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.474355 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-repo-setup-combined-ca-bundle\") pod \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.474377 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-ovn-default-certs-0\") pod \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.474415 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-bootstrap-combined-ca-bundle\") pod \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.474432 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-nova-combined-ca-bundle\") pod \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.474452 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-libvirt-combined-ca-bundle\") pod \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.474487 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-inventory\") pod \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.474502 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-telemetry-combined-ca-bundle\") pod \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.474529 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-ssh-key\") pod \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.474584 4684 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7622m\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-kube-api-access-7622m\") pod \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.474629 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-ovn-combined-ca-bundle\") pod \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.474648 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.474668 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-neutron-metadata-combined-ca-bundle\") pod \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.474708 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\" (UID: \"7a51a7e3-30bb-4bce-889c-b13d919ef64c\") " Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.479826 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "7a51a7e3-30bb-4bce-889c-b13d919ef64c" (UID: "7a51a7e3-30bb-4bce-889c-b13d919ef64c"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.480393 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "7a51a7e3-30bb-4bce-889c-b13d919ef64c" (UID: "7a51a7e3-30bb-4bce-889c-b13d919ef64c"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.481857 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "7a51a7e3-30bb-4bce-889c-b13d919ef64c" (UID: "7a51a7e3-30bb-4bce-889c-b13d919ef64c"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.481989 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "7a51a7e3-30bb-4bce-889c-b13d919ef64c" (UID: "7a51a7e3-30bb-4bce-889c-b13d919ef64c"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.482007 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-kube-api-access-7622m" (OuterVolumeSpecName: "kube-api-access-7622m") pod "7a51a7e3-30bb-4bce-889c-b13d919ef64c" (UID: "7a51a7e3-30bb-4bce-889c-b13d919ef64c"). InnerVolumeSpecName "kube-api-access-7622m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.482339 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "7a51a7e3-30bb-4bce-889c-b13d919ef64c" (UID: "7a51a7e3-30bb-4bce-889c-b13d919ef64c"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.482834 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "7a51a7e3-30bb-4bce-889c-b13d919ef64c" (UID: "7a51a7e3-30bb-4bce-889c-b13d919ef64c"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.483014 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "7a51a7e3-30bb-4bce-889c-b13d919ef64c" (UID: "7a51a7e3-30bb-4bce-889c-b13d919ef64c"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.483655 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "7a51a7e3-30bb-4bce-889c-b13d919ef64c" (UID: "7a51a7e3-30bb-4bce-889c-b13d919ef64c"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.483858 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "7a51a7e3-30bb-4bce-889c-b13d919ef64c" (UID: "7a51a7e3-30bb-4bce-889c-b13d919ef64c"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.487647 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "7a51a7e3-30bb-4bce-889c-b13d919ef64c" (UID: "7a51a7e3-30bb-4bce-889c-b13d919ef64c"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.491986 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "7a51a7e3-30bb-4bce-889c-b13d919ef64c" (UID: "7a51a7e3-30bb-4bce-889c-b13d919ef64c"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.504957 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7a51a7e3-30bb-4bce-889c-b13d919ef64c" (UID: "7a51a7e3-30bb-4bce-889c-b13d919ef64c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.514185 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-inventory" (OuterVolumeSpecName: "inventory") pod "7a51a7e3-30bb-4bce-889c-b13d919ef64c" (UID: "7a51a7e3-30bb-4bce-889c-b13d919ef64c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.577061 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7622m\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-kube-api-access-7622m\") on node \"crc\" DevicePath \"\"" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.577099 4684 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.577113 4684 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.577123 4684 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.577135 4684 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.577145 4684 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.577155 4684 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.577164 4684 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7a51a7e3-30bb-4bce-889c-b13d919ef64c-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.577173 4684 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.577181 4684 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.577190 4684 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.577200 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.577208 4684 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.577217 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7a51a7e3-30bb-4bce-889c-b13d919ef64c-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.853128 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" event={"ID":"7a51a7e3-30bb-4bce-889c-b13d919ef64c","Type":"ContainerDied","Data":"58e298b6c0a3531b8bec2c9b92e37fb84735859d0bd14ddc6c95a1b6254c49af"} Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.853164 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="58e298b6c0a3531b8bec2c9b92e37fb84735859d0bd14ddc6c95a1b6254c49af" Oct 13 13:39:06 crc kubenswrapper[4684]: I1013 13:39:06.853254 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hj69t" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.022719 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk"] Oct 13 13:39:07 crc kubenswrapper[4684]: E1013 13:39:07.023356 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a51a7e3-30bb-4bce-889c-b13d919ef64c" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.023380 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a51a7e3-30bb-4bce-889c-b13d919ef64c" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.023550 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a51a7e3-30bb-4bce-889c-b13d919ef64c" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.024220 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.026134 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.026230 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.026284 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.030268 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.030287 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.040751 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk"] Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.189279 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8cjk\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.189383 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqphc\" (UniqueName: \"kubernetes.io/projected/00a6e3dc-bdab-4eab-924c-37c33fecad3e-kube-api-access-mqphc\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8cjk\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.189643 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8cjk\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.189736 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8cjk\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.189797 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8cjk\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.291263 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8cjk\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.291408 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8cjk\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.291476 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8cjk\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.291610 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8cjk\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.291661 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqphc\" (UniqueName: \"kubernetes.io/projected/00a6e3dc-bdab-4eab-924c-37c33fecad3e-kube-api-access-mqphc\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8cjk\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.293841 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8cjk\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.296516 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8cjk\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.297171 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8cjk\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.302507 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8cjk\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") 
" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.321809 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqphc\" (UniqueName: \"kubernetes.io/projected/00a6e3dc-bdab-4eab-924c-37c33fecad3e-kube-api-access-mqphc\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-p8cjk\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.349494 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:39:07 crc kubenswrapper[4684]: I1013 13:39:07.929304 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk"] Oct 13 13:39:08 crc kubenswrapper[4684]: I1013 13:39:08.874137 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" event={"ID":"00a6e3dc-bdab-4eab-924c-37c33fecad3e","Type":"ContainerStarted","Data":"4317d887fbb682cfe66a8e12c8b2843cce043f7a3551f107610e1bb43b2fc3f9"} Oct 13 13:39:08 crc kubenswrapper[4684]: I1013 13:39:08.874425 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" event={"ID":"00a6e3dc-bdab-4eab-924c-37c33fecad3e","Type":"ContainerStarted","Data":"c753776055fbb061b7bf7d8a9e85e1decb60ab53f6f500f74a69e20d5896b01c"} Oct 13 13:39:08 crc kubenswrapper[4684]: I1013 13:39:08.902653 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" podStartSLOduration=1.473401464 podStartE2EDuration="1.902632976s" podCreationTimestamp="2025-10-13 13:39:07 +0000 UTC" firstStartedPulling="2025-10-13 13:39:07.93534066 +0000 UTC m=+1902.502724740" lastFinishedPulling="2025-10-13 13:39:08.364572182 +0000 UTC m=+1902.931956252" observedRunningTime="2025-10-13 13:39:08.897276918 +0000 UTC m=+1903.464660988" watchObservedRunningTime="2025-10-13 13:39:08.902632976 +0000 UTC m=+1903.470017046" Oct 13 13:40:00 crc kubenswrapper[4684]: I1013 13:40:00.560537 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:40:00 crc kubenswrapper[4684]: I1013 13:40:00.561252 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:40:16 crc kubenswrapper[4684]: I1013 13:40:16.493355 4684 generic.go:334] "Generic (PLEG): container finished" podID="00a6e3dc-bdab-4eab-924c-37c33fecad3e" containerID="4317d887fbb682cfe66a8e12c8b2843cce043f7a3551f107610e1bb43b2fc3f9" exitCode=0 Oct 13 13:40:16 crc kubenswrapper[4684]: I1013 13:40:16.493448 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" event={"ID":"00a6e3dc-bdab-4eab-924c-37c33fecad3e","Type":"ContainerDied","Data":"4317d887fbb682cfe66a8e12c8b2843cce043f7a3551f107610e1bb43b2fc3f9"} Oct 13 13:40:17 crc kubenswrapper[4684]: 
I1013 13:40:17.898216 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.054721 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ssh-key\") pod \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.054821 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-inventory\") pod \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.054925 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqphc\" (UniqueName: \"kubernetes.io/projected/00a6e3dc-bdab-4eab-924c-37c33fecad3e-kube-api-access-mqphc\") pod \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.055013 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ovncontroller-config-0\") pod \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.055058 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ovn-combined-ca-bundle\") pod \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\" (UID: \"00a6e3dc-bdab-4eab-924c-37c33fecad3e\") " Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.073082 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "00a6e3dc-bdab-4eab-924c-37c33fecad3e" (UID: "00a6e3dc-bdab-4eab-924c-37c33fecad3e"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.075295 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00a6e3dc-bdab-4eab-924c-37c33fecad3e-kube-api-access-mqphc" (OuterVolumeSpecName: "kube-api-access-mqphc") pod "00a6e3dc-bdab-4eab-924c-37c33fecad3e" (UID: "00a6e3dc-bdab-4eab-924c-37c33fecad3e"). InnerVolumeSpecName "kube-api-access-mqphc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.083086 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-inventory" (OuterVolumeSpecName: "inventory") pod "00a6e3dc-bdab-4eab-924c-37c33fecad3e" (UID: "00a6e3dc-bdab-4eab-924c-37c33fecad3e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.088169 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "00a6e3dc-bdab-4eab-924c-37c33fecad3e" (UID: "00a6e3dc-bdab-4eab-924c-37c33fecad3e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.097297 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "00a6e3dc-bdab-4eab-924c-37c33fecad3e" (UID: "00a6e3dc-bdab-4eab-924c-37c33fecad3e"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.156738 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqphc\" (UniqueName: \"kubernetes.io/projected/00a6e3dc-bdab-4eab-924c-37c33fecad3e-kube-api-access-mqphc\") on node \"crc\" DevicePath \"\"" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.156776 4684 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.156788 4684 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.156798 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.156811 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/00a6e3dc-bdab-4eab-924c-37c33fecad3e-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.512814 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" event={"ID":"00a6e3dc-bdab-4eab-924c-37c33fecad3e","Type":"ContainerDied","Data":"c753776055fbb061b7bf7d8a9e85e1decb60ab53f6f500f74a69e20d5896b01c"} Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.512850 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c753776055fbb061b7bf7d8a9e85e1decb60ab53f6f500f74a69e20d5896b01c" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.512867 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-p8cjk" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.589697 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7"] Oct 13 13:40:18 crc kubenswrapper[4684]: E1013 13:40:18.590181 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a6e3dc-bdab-4eab-924c-37c33fecad3e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.590203 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a6e3dc-bdab-4eab-924c-37c33fecad3e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.590409 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a6e3dc-bdab-4eab-924c-37c33fecad3e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.591173 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.599006 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.600438 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.600458 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.600583 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.600616 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.602028 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.605017 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7"] Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.666043 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.666105 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.666138 4684 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.666214 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.666289 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bspb4\" (UniqueName: \"kubernetes.io/projected/976ab3d8-44f4-4005-a286-439105b3d942-kube-api-access-bspb4\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.666341 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.767674 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.767826 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.767892 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.767961 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: 
\"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.768027 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.768143 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bspb4\" (UniqueName: \"kubernetes.io/projected/976ab3d8-44f4-4005-a286-439105b3d942-kube-api-access-bspb4\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.772853 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.772961 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.773263 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.773993 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.774864 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.789425 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bspb4\" (UniqueName: 
\"kubernetes.io/projected/976ab3d8-44f4-4005-a286-439105b3d942-kube-api-access-bspb4\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:18 crc kubenswrapper[4684]: I1013 13:40:18.908052 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:40:19 crc kubenswrapper[4684]: I1013 13:40:19.439329 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7"] Oct 13 13:40:19 crc kubenswrapper[4684]: I1013 13:40:19.522347 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" event={"ID":"976ab3d8-44f4-4005-a286-439105b3d942","Type":"ContainerStarted","Data":"113e7aade4c08a1949a29b55d8b81436617e0c89840e7d61e6defed8277aa6fb"} Oct 13 13:40:20 crc kubenswrapper[4684]: I1013 13:40:20.533826 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" event={"ID":"976ab3d8-44f4-4005-a286-439105b3d942","Type":"ContainerStarted","Data":"30ccc10622ad660421170a1c9226fa638416a95d2314e117f2643718adfb8ba3"} Oct 13 13:40:20 crc kubenswrapper[4684]: I1013 13:40:20.562757 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" podStartSLOduration=2.09861686 podStartE2EDuration="2.562737366s" podCreationTimestamp="2025-10-13 13:40:18 +0000 UTC" firstStartedPulling="2025-10-13 13:40:19.446114718 +0000 UTC m=+1974.013498798" lastFinishedPulling="2025-10-13 13:40:19.910235234 +0000 UTC m=+1974.477619304" observedRunningTime="2025-10-13 13:40:20.553820698 +0000 UTC m=+1975.121204768" watchObservedRunningTime="2025-10-13 13:40:20.562737366 +0000 UTC m=+1975.130121446" Oct 13 13:40:30 crc kubenswrapper[4684]: I1013 13:40:30.560534 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:40:30 crc kubenswrapper[4684]: I1013 13:40:30.561347 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:41:00 crc kubenswrapper[4684]: I1013 13:41:00.560322 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:41:00 crc kubenswrapper[4684]: I1013 13:41:00.560926 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:41:00 crc 
kubenswrapper[4684]: I1013 13:41:00.561025 4684 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:41:00 crc kubenswrapper[4684]: I1013 13:41:00.561727 4684 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ede402027974274ba6fe309180fbdcdddd9fb465f99a94370fe07cbd3a326e81"} pod="openshift-machine-config-operator/machine-config-daemon-wns5s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 13:41:00 crc kubenswrapper[4684]: I1013 13:41:00.561779 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" containerID="cri-o://ede402027974274ba6fe309180fbdcdddd9fb465f99a94370fe07cbd3a326e81" gracePeriod=600 Oct 13 13:41:00 crc kubenswrapper[4684]: I1013 13:41:00.929615 4684 generic.go:334] "Generic (PLEG): container finished" podID="e54ad64a-6df7-4082-afde-d56463121b3f" containerID="ede402027974274ba6fe309180fbdcdddd9fb465f99a94370fe07cbd3a326e81" exitCode=0 Oct 13 13:41:00 crc kubenswrapper[4684]: I1013 13:41:00.929671 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerDied","Data":"ede402027974274ba6fe309180fbdcdddd9fb465f99a94370fe07cbd3a326e81"} Oct 13 13:41:00 crc kubenswrapper[4684]: I1013 13:41:00.930043 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d"} Oct 13 13:41:00 crc kubenswrapper[4684]: I1013 13:41:00.930066 4684 scope.go:117] "RemoveContainer" containerID="545faba09e24166036d735fa7a4b647c1abb91a0b72369b78abc75f6d5f8b150" Oct 13 13:41:12 crc kubenswrapper[4684]: I1013 13:41:12.022705 4684 generic.go:334] "Generic (PLEG): container finished" podID="976ab3d8-44f4-4005-a286-439105b3d942" containerID="30ccc10622ad660421170a1c9226fa638416a95d2314e117f2643718adfb8ba3" exitCode=0 Oct 13 13:41:12 crc kubenswrapper[4684]: I1013 13:41:12.022830 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" event={"ID":"976ab3d8-44f4-4005-a286-439105b3d942","Type":"ContainerDied","Data":"30ccc10622ad660421170a1c9226fa638416a95d2314e117f2643718adfb8ba3"} Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.527122 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.653726 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-neutron-metadata-combined-ca-bundle\") pod \"976ab3d8-44f4-4005-a286-439105b3d942\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.653960 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bspb4\" (UniqueName: \"kubernetes.io/projected/976ab3d8-44f4-4005-a286-439105b3d942-kube-api-access-bspb4\") pod \"976ab3d8-44f4-4005-a286-439105b3d942\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.654013 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-neutron-ovn-metadata-agent-neutron-config-0\") pod \"976ab3d8-44f4-4005-a286-439105b3d942\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.654075 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-inventory\") pod \"976ab3d8-44f4-4005-a286-439105b3d942\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.654138 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-nova-metadata-neutron-config-0\") pod \"976ab3d8-44f4-4005-a286-439105b3d942\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.654172 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-ssh-key\") pod \"976ab3d8-44f4-4005-a286-439105b3d942\" (UID: \"976ab3d8-44f4-4005-a286-439105b3d942\") " Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.659489 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/976ab3d8-44f4-4005-a286-439105b3d942-kube-api-access-bspb4" (OuterVolumeSpecName: "kube-api-access-bspb4") pod "976ab3d8-44f4-4005-a286-439105b3d942" (UID: "976ab3d8-44f4-4005-a286-439105b3d942"). InnerVolumeSpecName "kube-api-access-bspb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.659674 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "976ab3d8-44f4-4005-a286-439105b3d942" (UID: "976ab3d8-44f4-4005-a286-439105b3d942"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.683828 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "976ab3d8-44f4-4005-a286-439105b3d942" (UID: "976ab3d8-44f4-4005-a286-439105b3d942"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.685272 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-inventory" (OuterVolumeSpecName: "inventory") pod "976ab3d8-44f4-4005-a286-439105b3d942" (UID: "976ab3d8-44f4-4005-a286-439105b3d942"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.691850 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "976ab3d8-44f4-4005-a286-439105b3d942" (UID: "976ab3d8-44f4-4005-a286-439105b3d942"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.708337 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "976ab3d8-44f4-4005-a286-439105b3d942" (UID: "976ab3d8-44f4-4005-a286-439105b3d942"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.757363 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.757418 4684 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.757440 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.757460 4684 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.757481 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bspb4\" (UniqueName: \"kubernetes.io/projected/976ab3d8-44f4-4005-a286-439105b3d942-kube-api-access-bspb4\") on node \"crc\" DevicePath \"\"" Oct 13 13:41:13 crc kubenswrapper[4684]: I1013 13:41:13.757501 4684 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/976ab3d8-44f4-4005-a286-439105b3d942-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.040177 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" event={"ID":"976ab3d8-44f4-4005-a286-439105b3d942","Type":"ContainerDied","Data":"113e7aade4c08a1949a29b55d8b81436617e0c89840e7d61e6defed8277aa6fb"} Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.040233 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.040235 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="113e7aade4c08a1949a29b55d8b81436617e0c89840e7d61e6defed8277aa6fb" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.130791 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r"] Oct 13 13:41:14 crc kubenswrapper[4684]: E1013 13:41:14.131444 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="976ab3d8-44f4-4005-a286-439105b3d942" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.131467 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="976ab3d8-44f4-4005-a286-439105b3d942" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.131636 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="976ab3d8-44f4-4005-a286-439105b3d942" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.132278 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.135134 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.135309 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.136268 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.136548 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.136665 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.144514 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r"] Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.266014 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cj82r\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.266416 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cj82r\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.266612 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cj82r\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.266786 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zskd\" (UniqueName: \"kubernetes.io/projected/4281de73-4320-444b-9d71-877c9cf226a0-kube-api-access-8zskd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cj82r\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.267021 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cj82r\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.370596 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cj82r\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.370667 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zskd\" (UniqueName: \"kubernetes.io/projected/4281de73-4320-444b-9d71-877c9cf226a0-kube-api-access-8zskd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cj82r\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.370800 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cj82r\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.370850 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cj82r\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.370956 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cj82r\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.377207 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cj82r\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.377268 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cj82r\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.377470 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cj82r\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.378427 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-libvirt-secret-0\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-cj82r\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.388854 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zskd\" (UniqueName: \"kubernetes.io/projected/4281de73-4320-444b-9d71-877c9cf226a0-kube-api-access-8zskd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cj82r\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.449079 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:41:14 crc kubenswrapper[4684]: I1013 13:41:14.970615 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r"] Oct 13 13:41:15 crc kubenswrapper[4684]: I1013 13:41:15.051125 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" event={"ID":"4281de73-4320-444b-9d71-877c9cf226a0","Type":"ContainerStarted","Data":"4f632e8880dad60f59fe65217384d87caba533a9d8de3b82d245986fc7319485"} Oct 13 13:41:16 crc kubenswrapper[4684]: I1013 13:41:16.059635 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" event={"ID":"4281de73-4320-444b-9d71-877c9cf226a0","Type":"ContainerStarted","Data":"0e505e3b3c9776989e2c19d630aa833b6fcd347cbd186cc53cd1217a5f2b900a"} Oct 13 13:41:16 crc kubenswrapper[4684]: I1013 13:41:16.081826 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" podStartSLOduration=1.5261858830000001 podStartE2EDuration="2.081810054s" podCreationTimestamp="2025-10-13 13:41:14 +0000 UTC" firstStartedPulling="2025-10-13 13:41:14.975452207 +0000 UTC m=+2029.542836277" lastFinishedPulling="2025-10-13 13:41:15.531076378 +0000 UTC m=+2030.098460448" observedRunningTime="2025-10-13 13:41:16.08137287 +0000 UTC m=+2030.648756940" watchObservedRunningTime="2025-10-13 13:41:16.081810054 +0000 UTC m=+2030.649194124" Oct 13 13:41:16 crc kubenswrapper[4684]: I1013 13:41:16.686538 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nn75h"] Oct 13 13:41:16 crc kubenswrapper[4684]: I1013 13:41:16.688984 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:16 crc kubenswrapper[4684]: I1013 13:41:16.708142 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nn75h"] Oct 13 13:41:16 crc kubenswrapper[4684]: I1013 13:41:16.831458 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9879400-00b6-4c8d-ba61-b34ab04e481a-catalog-content\") pod \"certified-operators-nn75h\" (UID: \"d9879400-00b6-4c8d-ba61-b34ab04e481a\") " pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:16 crc kubenswrapper[4684]: I1013 13:41:16.831655 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9879400-00b6-4c8d-ba61-b34ab04e481a-utilities\") pod \"certified-operators-nn75h\" (UID: \"d9879400-00b6-4c8d-ba61-b34ab04e481a\") " pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:16 crc kubenswrapper[4684]: I1013 13:41:16.831923 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6dsn\" (UniqueName: \"kubernetes.io/projected/d9879400-00b6-4c8d-ba61-b34ab04e481a-kube-api-access-n6dsn\") pod \"certified-operators-nn75h\" (UID: \"d9879400-00b6-4c8d-ba61-b34ab04e481a\") " pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:16 crc kubenswrapper[4684]: I1013 13:41:16.934238 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6dsn\" (UniqueName: \"kubernetes.io/projected/d9879400-00b6-4c8d-ba61-b34ab04e481a-kube-api-access-n6dsn\") pod \"certified-operators-nn75h\" (UID: \"d9879400-00b6-4c8d-ba61-b34ab04e481a\") " pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:16 crc kubenswrapper[4684]: I1013 13:41:16.934344 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9879400-00b6-4c8d-ba61-b34ab04e481a-catalog-content\") pod \"certified-operators-nn75h\" (UID: \"d9879400-00b6-4c8d-ba61-b34ab04e481a\") " pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:16 crc kubenswrapper[4684]: I1013 13:41:16.934413 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9879400-00b6-4c8d-ba61-b34ab04e481a-utilities\") pod \"certified-operators-nn75h\" (UID: \"d9879400-00b6-4c8d-ba61-b34ab04e481a\") " pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:16 crc kubenswrapper[4684]: I1013 13:41:16.934842 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9879400-00b6-4c8d-ba61-b34ab04e481a-utilities\") pod \"certified-operators-nn75h\" (UID: \"d9879400-00b6-4c8d-ba61-b34ab04e481a\") " pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:16 crc kubenswrapper[4684]: I1013 13:41:16.935089 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9879400-00b6-4c8d-ba61-b34ab04e481a-catalog-content\") pod \"certified-operators-nn75h\" (UID: \"d9879400-00b6-4c8d-ba61-b34ab04e481a\") " pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:16 crc kubenswrapper[4684]: I1013 13:41:16.970411 4684 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-n6dsn\" (UniqueName: \"kubernetes.io/projected/d9879400-00b6-4c8d-ba61-b34ab04e481a-kube-api-access-n6dsn\") pod \"certified-operators-nn75h\" (UID: \"d9879400-00b6-4c8d-ba61-b34ab04e481a\") " pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:17 crc kubenswrapper[4684]: I1013 13:41:17.029709 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:17 crc kubenswrapper[4684]: I1013 13:41:17.603762 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nn75h"] Oct 13 13:41:18 crc kubenswrapper[4684]: I1013 13:41:18.101813 4684 generic.go:334] "Generic (PLEG): container finished" podID="d9879400-00b6-4c8d-ba61-b34ab04e481a" containerID="2fb291919ed701940d581be479411bdbe877f0aad8e4641cb63db72fbf33f302" exitCode=0 Oct 13 13:41:18 crc kubenswrapper[4684]: I1013 13:41:18.101961 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nn75h" event={"ID":"d9879400-00b6-4c8d-ba61-b34ab04e481a","Type":"ContainerDied","Data":"2fb291919ed701940d581be479411bdbe877f0aad8e4641cb63db72fbf33f302"} Oct 13 13:41:18 crc kubenswrapper[4684]: I1013 13:41:18.102170 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nn75h" event={"ID":"d9879400-00b6-4c8d-ba61-b34ab04e481a","Type":"ContainerStarted","Data":"d03557fca0e0b8e64d9b6e062fd0005f1d4d517242e36ef7f9cceeddf21b94a3"} Oct 13 13:41:19 crc kubenswrapper[4684]: I1013 13:41:19.111789 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nn75h" event={"ID":"d9879400-00b6-4c8d-ba61-b34ab04e481a","Type":"ContainerStarted","Data":"ae39838f1dd5c10405eef5b96fa909d023cba1f519e9d783fb3ba0df03b40b47"} Oct 13 13:41:20 crc kubenswrapper[4684]: I1013 13:41:20.121730 4684 generic.go:334] "Generic (PLEG): container finished" podID="d9879400-00b6-4c8d-ba61-b34ab04e481a" containerID="ae39838f1dd5c10405eef5b96fa909d023cba1f519e9d783fb3ba0df03b40b47" exitCode=0 Oct 13 13:41:20 crc kubenswrapper[4684]: I1013 13:41:20.121776 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nn75h" event={"ID":"d9879400-00b6-4c8d-ba61-b34ab04e481a","Type":"ContainerDied","Data":"ae39838f1dd5c10405eef5b96fa909d023cba1f519e9d783fb3ba0df03b40b47"} Oct 13 13:41:21 crc kubenswrapper[4684]: I1013 13:41:21.131369 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nn75h" event={"ID":"d9879400-00b6-4c8d-ba61-b34ab04e481a","Type":"ContainerStarted","Data":"0339906197b2786a0276c03b7339bde0f502f8353a5db664b1c9c568d71844b6"} Oct 13 13:41:21 crc kubenswrapper[4684]: I1013 13:41:21.163424 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nn75h" podStartSLOduration=2.689015803 podStartE2EDuration="5.163405236s" podCreationTimestamp="2025-10-13 13:41:16 +0000 UTC" firstStartedPulling="2025-10-13 13:41:18.104698426 +0000 UTC m=+2032.672082506" lastFinishedPulling="2025-10-13 13:41:20.579087869 +0000 UTC m=+2035.146471939" observedRunningTime="2025-10-13 13:41:21.156752218 +0000 UTC m=+2035.724136288" watchObservedRunningTime="2025-10-13 13:41:21.163405236 +0000 UTC m=+2035.730789326" Oct 13 13:41:27 crc kubenswrapper[4684]: I1013 13:41:27.030786 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:27 crc kubenswrapper[4684]: I1013 13:41:27.031656 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:27 crc kubenswrapper[4684]: I1013 13:41:27.099183 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:27 crc kubenswrapper[4684]: I1013 13:41:27.282413 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:27 crc kubenswrapper[4684]: I1013 13:41:27.339831 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nn75h"] Oct 13 13:41:29 crc kubenswrapper[4684]: I1013 13:41:29.222049 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nn75h" podUID="d9879400-00b6-4c8d-ba61-b34ab04e481a" containerName="registry-server" containerID="cri-o://0339906197b2786a0276c03b7339bde0f502f8353a5db664b1c9c568d71844b6" gracePeriod=2 Oct 13 13:41:29 crc kubenswrapper[4684]: I1013 13:41:29.864924 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:29 crc kubenswrapper[4684]: I1013 13:41:29.903890 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9879400-00b6-4c8d-ba61-b34ab04e481a-utilities\") pod \"d9879400-00b6-4c8d-ba61-b34ab04e481a\" (UID: \"d9879400-00b6-4c8d-ba61-b34ab04e481a\") " Oct 13 13:41:29 crc kubenswrapper[4684]: I1013 13:41:29.904039 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9879400-00b6-4c8d-ba61-b34ab04e481a-catalog-content\") pod \"d9879400-00b6-4c8d-ba61-b34ab04e481a\" (UID: \"d9879400-00b6-4c8d-ba61-b34ab04e481a\") " Oct 13 13:41:29 crc kubenswrapper[4684]: I1013 13:41:29.904109 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6dsn\" (UniqueName: \"kubernetes.io/projected/d9879400-00b6-4c8d-ba61-b34ab04e481a-kube-api-access-n6dsn\") pod \"d9879400-00b6-4c8d-ba61-b34ab04e481a\" (UID: \"d9879400-00b6-4c8d-ba61-b34ab04e481a\") " Oct 13 13:41:29 crc kubenswrapper[4684]: I1013 13:41:29.905096 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9879400-00b6-4c8d-ba61-b34ab04e481a-utilities" (OuterVolumeSpecName: "utilities") pod "d9879400-00b6-4c8d-ba61-b34ab04e481a" (UID: "d9879400-00b6-4c8d-ba61-b34ab04e481a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:41:29 crc kubenswrapper[4684]: I1013 13:41:29.912071 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9879400-00b6-4c8d-ba61-b34ab04e481a-kube-api-access-n6dsn" (OuterVolumeSpecName: "kube-api-access-n6dsn") pod "d9879400-00b6-4c8d-ba61-b34ab04e481a" (UID: "d9879400-00b6-4c8d-ba61-b34ab04e481a"). InnerVolumeSpecName "kube-api-access-n6dsn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:41:29 crc kubenswrapper[4684]: I1013 13:41:29.959054 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9879400-00b6-4c8d-ba61-b34ab04e481a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d9879400-00b6-4c8d-ba61-b34ab04e481a" (UID: "d9879400-00b6-4c8d-ba61-b34ab04e481a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.007137 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6dsn\" (UniqueName: \"kubernetes.io/projected/d9879400-00b6-4c8d-ba61-b34ab04e481a-kube-api-access-n6dsn\") on node \"crc\" DevicePath \"\"" Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.007485 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9879400-00b6-4c8d-ba61-b34ab04e481a-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.007682 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9879400-00b6-4c8d-ba61-b34ab04e481a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.232685 4684 generic.go:334] "Generic (PLEG): container finished" podID="d9879400-00b6-4c8d-ba61-b34ab04e481a" containerID="0339906197b2786a0276c03b7339bde0f502f8353a5db664b1c9c568d71844b6" exitCode=0 Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.232802 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nn75h" event={"ID":"d9879400-00b6-4c8d-ba61-b34ab04e481a","Type":"ContainerDied","Data":"0339906197b2786a0276c03b7339bde0f502f8353a5db664b1c9c568d71844b6"} Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.233940 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nn75h" event={"ID":"d9879400-00b6-4c8d-ba61-b34ab04e481a","Type":"ContainerDied","Data":"d03557fca0e0b8e64d9b6e062fd0005f1d4d517242e36ef7f9cceeddf21b94a3"} Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.234020 4684 scope.go:117] "RemoveContainer" containerID="0339906197b2786a0276c03b7339bde0f502f8353a5db664b1c9c568d71844b6" Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.232868 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nn75h" Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.266960 4684 scope.go:117] "RemoveContainer" containerID="ae39838f1dd5c10405eef5b96fa909d023cba1f519e9d783fb3ba0df03b40b47" Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.305711 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nn75h"] Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.323043 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nn75h"] Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.340243 4684 scope.go:117] "RemoveContainer" containerID="2fb291919ed701940d581be479411bdbe877f0aad8e4641cb63db72fbf33f302" Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.366253 4684 scope.go:117] "RemoveContainer" containerID="0339906197b2786a0276c03b7339bde0f502f8353a5db664b1c9c568d71844b6" Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.366263 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9879400-00b6-4c8d-ba61-b34ab04e481a" path="/var/lib/kubelet/pods/d9879400-00b6-4c8d-ba61-b34ab04e481a/volumes" Oct 13 13:41:30 crc kubenswrapper[4684]: E1013 13:41:30.366738 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0339906197b2786a0276c03b7339bde0f502f8353a5db664b1c9c568d71844b6\": container with ID starting with 0339906197b2786a0276c03b7339bde0f502f8353a5db664b1c9c568d71844b6 not found: ID does not exist" containerID="0339906197b2786a0276c03b7339bde0f502f8353a5db664b1c9c568d71844b6" Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.366788 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0339906197b2786a0276c03b7339bde0f502f8353a5db664b1c9c568d71844b6"} err="failed to get container status \"0339906197b2786a0276c03b7339bde0f502f8353a5db664b1c9c568d71844b6\": rpc error: code = NotFound desc = could not find container \"0339906197b2786a0276c03b7339bde0f502f8353a5db664b1c9c568d71844b6\": container with ID starting with 0339906197b2786a0276c03b7339bde0f502f8353a5db664b1c9c568d71844b6 not found: ID does not exist" Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.366817 4684 scope.go:117] "RemoveContainer" containerID="ae39838f1dd5c10405eef5b96fa909d023cba1f519e9d783fb3ba0df03b40b47" Oct 13 13:41:30 crc kubenswrapper[4684]: E1013 13:41:30.367573 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae39838f1dd5c10405eef5b96fa909d023cba1f519e9d783fb3ba0df03b40b47\": container with ID starting with ae39838f1dd5c10405eef5b96fa909d023cba1f519e9d783fb3ba0df03b40b47 not found: ID does not exist" containerID="ae39838f1dd5c10405eef5b96fa909d023cba1f519e9d783fb3ba0df03b40b47" Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.367606 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae39838f1dd5c10405eef5b96fa909d023cba1f519e9d783fb3ba0df03b40b47"} err="failed to get container status \"ae39838f1dd5c10405eef5b96fa909d023cba1f519e9d783fb3ba0df03b40b47\": rpc error: code = NotFound desc = could not find container \"ae39838f1dd5c10405eef5b96fa909d023cba1f519e9d783fb3ba0df03b40b47\": container with ID starting with ae39838f1dd5c10405eef5b96fa909d023cba1f519e9d783fb3ba0df03b40b47 not found: ID does not exist" Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 
13:41:30.367625 4684 scope.go:117] "RemoveContainer" containerID="2fb291919ed701940d581be479411bdbe877f0aad8e4641cb63db72fbf33f302" Oct 13 13:41:30 crc kubenswrapper[4684]: E1013 13:41:30.369404 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fb291919ed701940d581be479411bdbe877f0aad8e4641cb63db72fbf33f302\": container with ID starting with 2fb291919ed701940d581be479411bdbe877f0aad8e4641cb63db72fbf33f302 not found: ID does not exist" containerID="2fb291919ed701940d581be479411bdbe877f0aad8e4641cb63db72fbf33f302" Oct 13 13:41:30 crc kubenswrapper[4684]: I1013 13:41:30.369462 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fb291919ed701940d581be479411bdbe877f0aad8e4641cb63db72fbf33f302"} err="failed to get container status \"2fb291919ed701940d581be479411bdbe877f0aad8e4641cb63db72fbf33f302\": rpc error: code = NotFound desc = could not find container \"2fb291919ed701940d581be479411bdbe877f0aad8e4641cb63db72fbf33f302\": container with ID starting with 2fb291919ed701940d581be479411bdbe877f0aad8e4641cb63db72fbf33f302 not found: ID does not exist" Oct 13 13:42:06 crc kubenswrapper[4684]: I1013 13:42:06.987621 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-d6bdt"] Oct 13 13:42:06 crc kubenswrapper[4684]: E1013 13:42:06.988691 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9879400-00b6-4c8d-ba61-b34ab04e481a" containerName="registry-server" Oct 13 13:42:06 crc kubenswrapper[4684]: I1013 13:42:06.988710 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9879400-00b6-4c8d-ba61-b34ab04e481a" containerName="registry-server" Oct 13 13:42:06 crc kubenswrapper[4684]: E1013 13:42:06.988729 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9879400-00b6-4c8d-ba61-b34ab04e481a" containerName="extract-content" Oct 13 13:42:06 crc kubenswrapper[4684]: I1013 13:42:06.988737 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9879400-00b6-4c8d-ba61-b34ab04e481a" containerName="extract-content" Oct 13 13:42:06 crc kubenswrapper[4684]: E1013 13:42:06.988759 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9879400-00b6-4c8d-ba61-b34ab04e481a" containerName="extract-utilities" Oct 13 13:42:06 crc kubenswrapper[4684]: I1013 13:42:06.988768 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9879400-00b6-4c8d-ba61-b34ab04e481a" containerName="extract-utilities" Oct 13 13:42:06 crc kubenswrapper[4684]: I1013 13:42:06.989096 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9879400-00b6-4c8d-ba61-b34ab04e481a" containerName="registry-server" Oct 13 13:42:06 crc kubenswrapper[4684]: I1013 13:42:06.990797 4684 util.go:30] "No sandbox for pod can be found. 
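The RemoveContainer / NotFound pairs above are a benign race: by the time the container's status is re-queried, the container is already gone, so the deletion is treated as done. A hedged sketch of that pattern using gRPC status codes (the removeContainer stub here is hypothetical, standing in for a CRI call):

    package main

    import (
            "fmt"

            "google.golang.org/grpc/codes"
            "google.golang.org/grpc/status"
    )

    // removeContainer stands in for a CRI RemoveContainer call that races
    // with cleanup already performed elsewhere.
    func removeContainer(id string) error {
            return status.Errorf(codes.NotFound, "could not find container %q", id)
    }

    func main() {
            err := removeContainer("0339906197b2")
            if status.Code(err) == codes.NotFound {
                    err = nil // already removed: deletion is idempotent
            }
            fmt.Println("removed:", err == nil)
    }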
Need to start a new one" pod="openshift-marketplace/community-operators-d6bdt" Oct 13 13:42:06 crc kubenswrapper[4684]: I1013 13:42:06.998766 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d6bdt"] Oct 13 13:42:07 crc kubenswrapper[4684]: I1013 13:42:07.134232 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94659f83-b3ae-4adb-ae04-6cd0d54b6567-utilities\") pod \"community-operators-d6bdt\" (UID: \"94659f83-b3ae-4adb-ae04-6cd0d54b6567\") " pod="openshift-marketplace/community-operators-d6bdt" Oct 13 13:42:07 crc kubenswrapper[4684]: I1013 13:42:07.134307 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94659f83-b3ae-4adb-ae04-6cd0d54b6567-catalog-content\") pod \"community-operators-d6bdt\" (UID: \"94659f83-b3ae-4adb-ae04-6cd0d54b6567\") " pod="openshift-marketplace/community-operators-d6bdt" Oct 13 13:42:07 crc kubenswrapper[4684]: I1013 13:42:07.134503 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j87sq\" (UniqueName: \"kubernetes.io/projected/94659f83-b3ae-4adb-ae04-6cd0d54b6567-kube-api-access-j87sq\") pod \"community-operators-d6bdt\" (UID: \"94659f83-b3ae-4adb-ae04-6cd0d54b6567\") " pod="openshift-marketplace/community-operators-d6bdt" Oct 13 13:42:07 crc kubenswrapper[4684]: I1013 13:42:07.236350 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94659f83-b3ae-4adb-ae04-6cd0d54b6567-catalog-content\") pod \"community-operators-d6bdt\" (UID: \"94659f83-b3ae-4adb-ae04-6cd0d54b6567\") " pod="openshift-marketplace/community-operators-d6bdt" Oct 13 13:42:07 crc kubenswrapper[4684]: I1013 13:42:07.236544 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j87sq\" (UniqueName: \"kubernetes.io/projected/94659f83-b3ae-4adb-ae04-6cd0d54b6567-kube-api-access-j87sq\") pod \"community-operators-d6bdt\" (UID: \"94659f83-b3ae-4adb-ae04-6cd0d54b6567\") " pod="openshift-marketplace/community-operators-d6bdt" Oct 13 13:42:07 crc kubenswrapper[4684]: I1013 13:42:07.236568 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94659f83-b3ae-4adb-ae04-6cd0d54b6567-utilities\") pod \"community-operators-d6bdt\" (UID: \"94659f83-b3ae-4adb-ae04-6cd0d54b6567\") " pod="openshift-marketplace/community-operators-d6bdt" Oct 13 13:42:07 crc kubenswrapper[4684]: I1013 13:42:07.237069 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94659f83-b3ae-4adb-ae04-6cd0d54b6567-catalog-content\") pod \"community-operators-d6bdt\" (UID: \"94659f83-b3ae-4adb-ae04-6cd0d54b6567\") " pod="openshift-marketplace/community-operators-d6bdt" Oct 13 13:42:07 crc kubenswrapper[4684]: I1013 13:42:07.237079 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94659f83-b3ae-4adb-ae04-6cd0d54b6567-utilities\") pod \"community-operators-d6bdt\" (UID: \"94659f83-b3ae-4adb-ae04-6cd0d54b6567\") " pod="openshift-marketplace/community-operators-d6bdt" Oct 13 13:42:07 crc kubenswrapper[4684]: I1013 13:42:07.255166 4684 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-j87sq\" (UniqueName: \"kubernetes.io/projected/94659f83-b3ae-4adb-ae04-6cd0d54b6567-kube-api-access-j87sq\") pod \"community-operators-d6bdt\" (UID: \"94659f83-b3ae-4adb-ae04-6cd0d54b6567\") " pod="openshift-marketplace/community-operators-d6bdt" Oct 13 13:42:07 crc kubenswrapper[4684]: I1013 13:42:07.349390 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d6bdt" Oct 13 13:42:08 crc kubenswrapper[4684]: I1013 13:42:08.002249 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d6bdt"] Oct 13 13:42:08 crc kubenswrapper[4684]: W1013 13:42:08.011280 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94659f83_b3ae_4adb_ae04_6cd0d54b6567.slice/crio-75c8017b3e7f978e0188aecca4bf39db7f2e77ac73531cf53badfc199154b9d4 WatchSource:0}: Error finding container 75c8017b3e7f978e0188aecca4bf39db7f2e77ac73531cf53badfc199154b9d4: Status 404 returned error can't find the container with id 75c8017b3e7f978e0188aecca4bf39db7f2e77ac73531cf53badfc199154b9d4 Oct 13 13:42:08 crc kubenswrapper[4684]: I1013 13:42:08.665676 4684 generic.go:334] "Generic (PLEG): container finished" podID="94659f83-b3ae-4adb-ae04-6cd0d54b6567" containerID="85d9cdd870bad70403c9d5305676cb9c12bbe1d2244d259018cea628a77271b9" exitCode=0 Oct 13 13:42:08 crc kubenswrapper[4684]: I1013 13:42:08.665769 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6bdt" event={"ID":"94659f83-b3ae-4adb-ae04-6cd0d54b6567","Type":"ContainerDied","Data":"85d9cdd870bad70403c9d5305676cb9c12bbe1d2244d259018cea628a77271b9"} Oct 13 13:42:08 crc kubenswrapper[4684]: I1013 13:42:08.666137 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6bdt" event={"ID":"94659f83-b3ae-4adb-ae04-6cd0d54b6567","Type":"ContainerStarted","Data":"75c8017b3e7f978e0188aecca4bf39db7f2e77ac73531cf53badfc199154b9d4"} Oct 13 13:42:10 crc kubenswrapper[4684]: I1013 13:42:10.687100 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6bdt" event={"ID":"94659f83-b3ae-4adb-ae04-6cd0d54b6567","Type":"ContainerStarted","Data":"49afac82757dde84d90e6a06c65da890c8bf3d998e35374b1a433305d2e745d0"} Oct 13 13:42:11 crc kubenswrapper[4684]: I1013 13:42:11.698107 4684 generic.go:334] "Generic (PLEG): container finished" podID="94659f83-b3ae-4adb-ae04-6cd0d54b6567" containerID="49afac82757dde84d90e6a06c65da890c8bf3d998e35374b1a433305d2e745d0" exitCode=0 Oct 13 13:42:11 crc kubenswrapper[4684]: I1013 13:42:11.698208 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6bdt" event={"ID":"94659f83-b3ae-4adb-ae04-6cd0d54b6567","Type":"ContainerDied","Data":"49afac82757dde84d90e6a06c65da890c8bf3d998e35374b1a433305d2e745d0"} Oct 13 13:42:12 crc kubenswrapper[4684]: I1013 13:42:12.713436 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6bdt" event={"ID":"94659f83-b3ae-4adb-ae04-6cd0d54b6567","Type":"ContainerStarted","Data":"9c7a329091afb48b12dad902786a342e9faa7829a5b31daeac43d45b0febc1eb"} Oct 13 13:42:12 crc kubenswrapper[4684]: I1013 13:42:12.738105 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-d6bdt" 
Oct 13 13:42:17 crc kubenswrapper[4684]: I1013 13:42:17.350529 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-d6bdt"
Oct 13 13:42:17 crc kubenswrapper[4684]: I1013 13:42:17.350860 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-d6bdt"
Oct 13 13:42:17 crc kubenswrapper[4684]: I1013 13:42:17.403997 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-d6bdt"
Oct 13 13:42:17 crc kubenswrapper[4684]: I1013 13:42:17.814933 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-d6bdt"
Oct 13 13:42:17 crc kubenswrapper[4684]: I1013 13:42:17.869938 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d6bdt"]
Oct 13 13:42:19 crc kubenswrapper[4684]: I1013 13:42:19.777808 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-d6bdt" podUID="94659f83-b3ae-4adb-ae04-6cd0d54b6567" containerName="registry-server" containerID="cri-o://9c7a329091afb48b12dad902786a342e9faa7829a5b31daeac43d45b0febc1eb" gracePeriod=2
Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.250407 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d6bdt"
Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.435460 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94659f83-b3ae-4adb-ae04-6cd0d54b6567-catalog-content\") pod \"94659f83-b3ae-4adb-ae04-6cd0d54b6567\" (UID: \"94659f83-b3ae-4adb-ae04-6cd0d54b6567\") "
Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.435598 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94659f83-b3ae-4adb-ae04-6cd0d54b6567-utilities\") pod \"94659f83-b3ae-4adb-ae04-6cd0d54b6567\" (UID: \"94659f83-b3ae-4adb-ae04-6cd0d54b6567\") "
Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.435684 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j87sq\" (UniqueName: \"kubernetes.io/projected/94659f83-b3ae-4adb-ae04-6cd0d54b6567-kube-api-access-j87sq\") pod \"94659f83-b3ae-4adb-ae04-6cd0d54b6567\" (UID: \"94659f83-b3ae-4adb-ae04-6cd0d54b6567\") "
Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.436435 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94659f83-b3ae-4adb-ae04-6cd0d54b6567-utilities" (OuterVolumeSpecName: "utilities") pod "94659f83-b3ae-4adb-ae04-6cd0d54b6567" (UID: "94659f83-b3ae-4adb-ae04-6cd0d54b6567"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.449262 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94659f83-b3ae-4adb-ae04-6cd0d54b6567-kube-api-access-j87sq" (OuterVolumeSpecName: "kube-api-access-j87sq") pod "94659f83-b3ae-4adb-ae04-6cd0d54b6567" (UID: "94659f83-b3ae-4adb-ae04-6cd0d54b6567"). InnerVolumeSpecName "kube-api-access-j87sq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.522087 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94659f83-b3ae-4adb-ae04-6cd0d54b6567-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "94659f83-b3ae-4adb-ae04-6cd0d54b6567" (UID: "94659f83-b3ae-4adb-ae04-6cd0d54b6567"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.538543 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94659f83-b3ae-4adb-ae04-6cd0d54b6567-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.538616 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94659f83-b3ae-4adb-ae04-6cd0d54b6567-utilities\") on node \"crc\" DevicePath \"\""
Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.538631 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j87sq\" (UniqueName: \"kubernetes.io/projected/94659f83-b3ae-4adb-ae04-6cd0d54b6567-kube-api-access-j87sq\") on node \"crc\" DevicePath \"\""
Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.790678 4684 generic.go:334] "Generic (PLEG): container finished" podID="94659f83-b3ae-4adb-ae04-6cd0d54b6567" containerID="9c7a329091afb48b12dad902786a342e9faa7829a5b31daeac43d45b0febc1eb" exitCode=0
Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.790704 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d6bdt"
Need to start a new one" pod="openshift-marketplace/community-operators-d6bdt" Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.790727 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6bdt" event={"ID":"94659f83-b3ae-4adb-ae04-6cd0d54b6567","Type":"ContainerDied","Data":"9c7a329091afb48b12dad902786a342e9faa7829a5b31daeac43d45b0febc1eb"} Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.790779 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6bdt" event={"ID":"94659f83-b3ae-4adb-ae04-6cd0d54b6567","Type":"ContainerDied","Data":"75c8017b3e7f978e0188aecca4bf39db7f2e77ac73531cf53badfc199154b9d4"} Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.790800 4684 scope.go:117] "RemoveContainer" containerID="9c7a329091afb48b12dad902786a342e9faa7829a5b31daeac43d45b0febc1eb" Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.823786 4684 scope.go:117] "RemoveContainer" containerID="49afac82757dde84d90e6a06c65da890c8bf3d998e35374b1a433305d2e745d0" Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.825854 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d6bdt"] Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.843156 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-d6bdt"] Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.849594 4684 scope.go:117] "RemoveContainer" containerID="85d9cdd870bad70403c9d5305676cb9c12bbe1d2244d259018cea628a77271b9" Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.883684 4684 scope.go:117] "RemoveContainer" containerID="9c7a329091afb48b12dad902786a342e9faa7829a5b31daeac43d45b0febc1eb" Oct 13 13:42:20 crc kubenswrapper[4684]: E1013 13:42:20.884157 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c7a329091afb48b12dad902786a342e9faa7829a5b31daeac43d45b0febc1eb\": container with ID starting with 9c7a329091afb48b12dad902786a342e9faa7829a5b31daeac43d45b0febc1eb not found: ID does not exist" containerID="9c7a329091afb48b12dad902786a342e9faa7829a5b31daeac43d45b0febc1eb" Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.884276 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c7a329091afb48b12dad902786a342e9faa7829a5b31daeac43d45b0febc1eb"} err="failed to get container status \"9c7a329091afb48b12dad902786a342e9faa7829a5b31daeac43d45b0febc1eb\": rpc error: code = NotFound desc = could not find container \"9c7a329091afb48b12dad902786a342e9faa7829a5b31daeac43d45b0febc1eb\": container with ID starting with 9c7a329091afb48b12dad902786a342e9faa7829a5b31daeac43d45b0febc1eb not found: ID does not exist" Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.884405 4684 scope.go:117] "RemoveContainer" containerID="49afac82757dde84d90e6a06c65da890c8bf3d998e35374b1a433305d2e745d0" Oct 13 13:42:20 crc kubenswrapper[4684]: E1013 13:42:20.885254 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49afac82757dde84d90e6a06c65da890c8bf3d998e35374b1a433305d2e745d0\": container with ID starting with 49afac82757dde84d90e6a06c65da890c8bf3d998e35374b1a433305d2e745d0 not found: ID does not exist" containerID="49afac82757dde84d90e6a06c65da890c8bf3d998e35374b1a433305d2e745d0" Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.885358 4684 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49afac82757dde84d90e6a06c65da890c8bf3d998e35374b1a433305d2e745d0"} err="failed to get container status \"49afac82757dde84d90e6a06c65da890c8bf3d998e35374b1a433305d2e745d0\": rpc error: code = NotFound desc = could not find container \"49afac82757dde84d90e6a06c65da890c8bf3d998e35374b1a433305d2e745d0\": container with ID starting with 49afac82757dde84d90e6a06c65da890c8bf3d998e35374b1a433305d2e745d0 not found: ID does not exist" Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.885552 4684 scope.go:117] "RemoveContainer" containerID="85d9cdd870bad70403c9d5305676cb9c12bbe1d2244d259018cea628a77271b9" Oct 13 13:42:20 crc kubenswrapper[4684]: E1013 13:42:20.886078 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85d9cdd870bad70403c9d5305676cb9c12bbe1d2244d259018cea628a77271b9\": container with ID starting with 85d9cdd870bad70403c9d5305676cb9c12bbe1d2244d259018cea628a77271b9 not found: ID does not exist" containerID="85d9cdd870bad70403c9d5305676cb9c12bbe1d2244d259018cea628a77271b9" Oct 13 13:42:20 crc kubenswrapper[4684]: I1013 13:42:20.886114 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85d9cdd870bad70403c9d5305676cb9c12bbe1d2244d259018cea628a77271b9"} err="failed to get container status \"85d9cdd870bad70403c9d5305676cb9c12bbe1d2244d259018cea628a77271b9\": rpc error: code = NotFound desc = could not find container \"85d9cdd870bad70403c9d5305676cb9c12bbe1d2244d259018cea628a77271b9\": container with ID starting with 85d9cdd870bad70403c9d5305676cb9c12bbe1d2244d259018cea628a77271b9 not found: ID does not exist" Oct 13 13:42:22 crc kubenswrapper[4684]: I1013 13:42:22.373065 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94659f83-b3ae-4adb-ae04-6cd0d54b6567" path="/var/lib/kubelet/pods/94659f83-b3ae-4adb-ae04-6cd0d54b6567/volumes" Oct 13 13:43:00 crc kubenswrapper[4684]: I1013 13:43:00.559754 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:43:00 crc kubenswrapper[4684]: I1013 13:43:00.561196 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:43:30 crc kubenswrapper[4684]: I1013 13:43:30.560605 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:43:30 crc kubenswrapper[4684]: I1013 13:43:30.561192 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:44:00 crc kubenswrapper[4684]: I1013 
13:44:00.560175 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:44:00 crc kubenswrapper[4684]: I1013 13:44:00.560825 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:44:00 crc kubenswrapper[4684]: I1013 13:44:00.560880 4684 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:44:00 crc kubenswrapper[4684]: I1013 13:44:00.561726 4684 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d"} pod="openshift-machine-config-operator/machine-config-daemon-wns5s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 13:44:00 crc kubenswrapper[4684]: I1013 13:44:00.561788 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" containerID="cri-o://0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" gracePeriod=600 Oct 13 13:44:00 crc kubenswrapper[4684]: E1013 13:44:00.685514 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:44:00 crc kubenswrapper[4684]: I1013 13:44:00.851592 4684 generic.go:334] "Generic (PLEG): container finished" podID="e54ad64a-6df7-4082-afde-d56463121b3f" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" exitCode=0 Oct 13 13:44:00 crc kubenswrapper[4684]: I1013 13:44:00.851648 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerDied","Data":"0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d"} Oct 13 13:44:00 crc kubenswrapper[4684]: I1013 13:44:00.851692 4684 scope.go:117] "RemoveContainer" containerID="ede402027974274ba6fe309180fbdcdddd9fb465f99a94370fe07cbd3a326e81" Oct 13 13:44:00 crc kubenswrapper[4684]: I1013 13:44:00.852464 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:44:00 crc kubenswrapper[4684]: E1013 13:44:00.852744 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" 
Oct 13 13:44:00 crc kubenswrapper[4684]: E1013 13:44:00.852744 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.453934 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2shfz"]
Oct 13 13:44:08 crc kubenswrapper[4684]: E1013 13:44:08.457452 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94659f83-b3ae-4adb-ae04-6cd0d54b6567" containerName="extract-utilities"
Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.457485 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="94659f83-b3ae-4adb-ae04-6cd0d54b6567" containerName="extract-utilities"
Oct 13 13:44:08 crc kubenswrapper[4684]: E1013 13:44:08.457512 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94659f83-b3ae-4adb-ae04-6cd0d54b6567" containerName="extract-content"
Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.457520 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="94659f83-b3ae-4adb-ae04-6cd0d54b6567" containerName="extract-content"
Oct 13 13:44:08 crc kubenswrapper[4684]: E1013 13:44:08.457542 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94659f83-b3ae-4adb-ae04-6cd0d54b6567" containerName="registry-server"
Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.457547 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="94659f83-b3ae-4adb-ae04-6cd0d54b6567" containerName="registry-server"
Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.457743 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="94659f83-b3ae-4adb-ae04-6cd0d54b6567" containerName="registry-server"
Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.459236 4684 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2shfz" Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.476876 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2shfz"] Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.615744 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6347f664-9d07-4050-96ed-11da6506909f-catalog-content\") pod \"redhat-marketplace-2shfz\" (UID: \"6347f664-9d07-4050-96ed-11da6506909f\") " pod="openshift-marketplace/redhat-marketplace-2shfz" Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.616078 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6347f664-9d07-4050-96ed-11da6506909f-utilities\") pod \"redhat-marketplace-2shfz\" (UID: \"6347f664-9d07-4050-96ed-11da6506909f\") " pod="openshift-marketplace/redhat-marketplace-2shfz" Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.616145 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w6v2\" (UniqueName: \"kubernetes.io/projected/6347f664-9d07-4050-96ed-11da6506909f-kube-api-access-4w6v2\") pod \"redhat-marketplace-2shfz\" (UID: \"6347f664-9d07-4050-96ed-11da6506909f\") " pod="openshift-marketplace/redhat-marketplace-2shfz" Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.717886 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6347f664-9d07-4050-96ed-11da6506909f-catalog-content\") pod \"redhat-marketplace-2shfz\" (UID: \"6347f664-9d07-4050-96ed-11da6506909f\") " pod="openshift-marketplace/redhat-marketplace-2shfz" Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.717951 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6347f664-9d07-4050-96ed-11da6506909f-utilities\") pod \"redhat-marketplace-2shfz\" (UID: \"6347f664-9d07-4050-96ed-11da6506909f\") " pod="openshift-marketplace/redhat-marketplace-2shfz" Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.718017 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w6v2\" (UniqueName: \"kubernetes.io/projected/6347f664-9d07-4050-96ed-11da6506909f-kube-api-access-4w6v2\") pod \"redhat-marketplace-2shfz\" (UID: \"6347f664-9d07-4050-96ed-11da6506909f\") " pod="openshift-marketplace/redhat-marketplace-2shfz" Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.718469 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6347f664-9d07-4050-96ed-11da6506909f-utilities\") pod \"redhat-marketplace-2shfz\" (UID: \"6347f664-9d07-4050-96ed-11da6506909f\") " pod="openshift-marketplace/redhat-marketplace-2shfz" Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.718848 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6347f664-9d07-4050-96ed-11da6506909f-catalog-content\") pod \"redhat-marketplace-2shfz\" (UID: \"6347f664-9d07-4050-96ed-11da6506909f\") " pod="openshift-marketplace/redhat-marketplace-2shfz" Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.736757 4684 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-4w6v2\" (UniqueName: \"kubernetes.io/projected/6347f664-9d07-4050-96ed-11da6506909f-kube-api-access-4w6v2\") pod \"redhat-marketplace-2shfz\" (UID: \"6347f664-9d07-4050-96ed-11da6506909f\") " pod="openshift-marketplace/redhat-marketplace-2shfz" Oct 13 13:44:08 crc kubenswrapper[4684]: I1013 13:44:08.794993 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2shfz" Oct 13 13:44:09 crc kubenswrapper[4684]: I1013 13:44:09.234091 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2shfz"] Oct 13 13:44:09 crc kubenswrapper[4684]: W1013 13:44:09.240097 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6347f664_9d07_4050_96ed_11da6506909f.slice/crio-163f62a4400c7e7d779ff3bce74d83bde7334d6c6bd018b3c497bbfbd07d0a43 WatchSource:0}: Error finding container 163f62a4400c7e7d779ff3bce74d83bde7334d6c6bd018b3c497bbfbd07d0a43: Status 404 returned error can't find the container with id 163f62a4400c7e7d779ff3bce74d83bde7334d6c6bd018b3c497bbfbd07d0a43 Oct 13 13:44:09 crc kubenswrapper[4684]: I1013 13:44:09.940712 4684 generic.go:334] "Generic (PLEG): container finished" podID="6347f664-9d07-4050-96ed-11da6506909f" containerID="056d447005281e6a9bed8ddc4ddc900ee0ae875b6bf9f00609e35f6f73adcc5d" exitCode=0 Oct 13 13:44:09 crc kubenswrapper[4684]: I1013 13:44:09.940770 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2shfz" event={"ID":"6347f664-9d07-4050-96ed-11da6506909f","Type":"ContainerDied","Data":"056d447005281e6a9bed8ddc4ddc900ee0ae875b6bf9f00609e35f6f73adcc5d"} Oct 13 13:44:09 crc kubenswrapper[4684]: I1013 13:44:09.940820 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2shfz" event={"ID":"6347f664-9d07-4050-96ed-11da6506909f","Type":"ContainerStarted","Data":"163f62a4400c7e7d779ff3bce74d83bde7334d6c6bd018b3c497bbfbd07d0a43"} Oct 13 13:44:09 crc kubenswrapper[4684]: I1013 13:44:09.942520 4684 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 13 13:44:10 crc kubenswrapper[4684]: I1013 13:44:10.954290 4684 generic.go:334] "Generic (PLEG): container finished" podID="6347f664-9d07-4050-96ed-11da6506909f" containerID="e42889ee993f76d5b99e847dab1f9675a7fc763798255add6170182a3e96ac83" exitCode=0 Oct 13 13:44:10 crc kubenswrapper[4684]: I1013 13:44:10.954356 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2shfz" event={"ID":"6347f664-9d07-4050-96ed-11da6506909f","Type":"ContainerDied","Data":"e42889ee993f76d5b99e847dab1f9675a7fc763798255add6170182a3e96ac83"} Oct 13 13:44:11 crc kubenswrapper[4684]: I1013 13:44:11.964775 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2shfz" event={"ID":"6347f664-9d07-4050-96ed-11da6506909f","Type":"ContainerStarted","Data":"8fd5ed02de598997d941f0da771abb02011564b38974149751c37e9cfc2086c9"} Oct 13 13:44:11 crc kubenswrapper[4684]: I1013 13:44:11.989277 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2shfz" podStartSLOduration=2.571759131 podStartE2EDuration="3.989259215s" podCreationTimestamp="2025-10-13 13:44:08 +0000 UTC" firstStartedPulling="2025-10-13 13:44:09.942258889 +0000 UTC m=+2204.509642959" 
lastFinishedPulling="2025-10-13 13:44:11.359758973 +0000 UTC m=+2205.927143043" observedRunningTime="2025-10-13 13:44:11.980740279 +0000 UTC m=+2206.548124349" watchObservedRunningTime="2025-10-13 13:44:11.989259215 +0000 UTC m=+2206.556643285"
Oct 13 13:44:14 crc kubenswrapper[4684]: I1013 13:44:14.351194 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d"
Oct 13 13:44:14 crc kubenswrapper[4684]: E1013 13:44:14.352660 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:44:18 crc kubenswrapper[4684]: I1013 13:44:18.796013 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2shfz"
Oct 13 13:44:18 crc kubenswrapper[4684]: I1013 13:44:18.796568 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2shfz"
Oct 13 13:44:18 crc kubenswrapper[4684]: I1013 13:44:18.863447 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2shfz"
Oct 13 13:44:19 crc kubenswrapper[4684]: I1013 13:44:19.075876 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2shfz"
Oct 13 13:44:19 crc kubenswrapper[4684]: I1013 13:44:19.127918 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2shfz"]
Oct 13 13:44:21 crc kubenswrapper[4684]: I1013 13:44:21.046702 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2shfz" podUID="6347f664-9d07-4050-96ed-11da6506909f" containerName="registry-server" containerID="cri-o://8fd5ed02de598997d941f0da771abb02011564b38974149751c37e9cfc2086c9" gracePeriod=2
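
The pod_startup_latency_tracker entry above decomposes cleanly: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (13:44:11.989259215 - 13:44:08 = 3.989259215s), and podStartSLOduration subtracts the image-pull window (lastFinishedPulling - firstStartedPulling = 1.417500084s) from it, giving 2.571759131s, exactly the logged value; the redhat-operators-86l7p entry at 13:45:37 below satisfies the same identity. A short Go check with the timestamps copied from the log:

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	// Timestamps from the "Observed pod startup duration" entry for
    	// redhat-marketplace-2shfz.
    	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
    	created, _ := time.Parse(layout, "2025-10-13 13:44:08 +0000 UTC")
    	pullStart, _ := time.Parse(layout, "2025-10-13 13:44:09.942258889 +0000 UTC")
    	pullEnd, _ := time.Parse(layout, "2025-10-13 13:44:11.359758973 +0000 UTC")
    	running, _ := time.Parse(layout, "2025-10-13 13:44:11.989259215 +0000 UTC")

    	e2e := running.Sub(created)         // podStartE2EDuration: 3.989259215s
    	slo := e2e - pullEnd.Sub(pullStart) // excludes the pull window: 2.571759131s
    	fmt.Println(e2e, slo)
    }
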
Oct 13 13:44:21 crc kubenswrapper[4684]: I1013 13:44:21.502803 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2shfz"
Oct 13 13:44:21 crc kubenswrapper[4684]: I1013 13:44:21.577982 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6347f664-9d07-4050-96ed-11da6506909f-utilities\") pod \"6347f664-9d07-4050-96ed-11da6506909f\" (UID: \"6347f664-9d07-4050-96ed-11da6506909f\") "
Oct 13 13:44:21 crc kubenswrapper[4684]: I1013 13:44:21.578043 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6347f664-9d07-4050-96ed-11da6506909f-catalog-content\") pod \"6347f664-9d07-4050-96ed-11da6506909f\" (UID: \"6347f664-9d07-4050-96ed-11da6506909f\") "
Oct 13 13:44:21 crc kubenswrapper[4684]: I1013 13:44:21.578168 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4w6v2\" (UniqueName: \"kubernetes.io/projected/6347f664-9d07-4050-96ed-11da6506909f-kube-api-access-4w6v2\") pod \"6347f664-9d07-4050-96ed-11da6506909f\" (UID: \"6347f664-9d07-4050-96ed-11da6506909f\") "
Oct 13 13:44:21 crc kubenswrapper[4684]: I1013 13:44:21.578915 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6347f664-9d07-4050-96ed-11da6506909f-utilities" (OuterVolumeSpecName: "utilities") pod "6347f664-9d07-4050-96ed-11da6506909f" (UID: "6347f664-9d07-4050-96ed-11da6506909f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 13:44:21 crc kubenswrapper[4684]: I1013 13:44:21.584040 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6347f664-9d07-4050-96ed-11da6506909f-kube-api-access-4w6v2" (OuterVolumeSpecName: "kube-api-access-4w6v2") pod "6347f664-9d07-4050-96ed-11da6506909f" (UID: "6347f664-9d07-4050-96ed-11da6506909f"). InnerVolumeSpecName "kube-api-access-4w6v2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:44:21 crc kubenswrapper[4684]: I1013 13:44:21.601042 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6347f664-9d07-4050-96ed-11da6506909f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6347f664-9d07-4050-96ed-11da6506909f" (UID: "6347f664-9d07-4050-96ed-11da6506909f"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:44:21 crc kubenswrapper[4684]: I1013 13:44:21.681354 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6347f664-9d07-4050-96ed-11da6506909f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:44:21 crc kubenswrapper[4684]: I1013 13:44:21.681441 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4w6v2\" (UniqueName: \"kubernetes.io/projected/6347f664-9d07-4050-96ed-11da6506909f-kube-api-access-4w6v2\") on node \"crc\" DevicePath \"\"" Oct 13 13:44:21 crc kubenswrapper[4684]: I1013 13:44:21.681463 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6347f664-9d07-4050-96ed-11da6506909f-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:44:22 crc kubenswrapper[4684]: I1013 13:44:22.058614 4684 generic.go:334] "Generic (PLEG): container finished" podID="6347f664-9d07-4050-96ed-11da6506909f" containerID="8fd5ed02de598997d941f0da771abb02011564b38974149751c37e9cfc2086c9" exitCode=0 Oct 13 13:44:22 crc kubenswrapper[4684]: I1013 13:44:22.058675 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2shfz" Oct 13 13:44:22 crc kubenswrapper[4684]: I1013 13:44:22.058671 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2shfz" event={"ID":"6347f664-9d07-4050-96ed-11da6506909f","Type":"ContainerDied","Data":"8fd5ed02de598997d941f0da771abb02011564b38974149751c37e9cfc2086c9"} Oct 13 13:44:22 crc kubenswrapper[4684]: I1013 13:44:22.060873 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2shfz" event={"ID":"6347f664-9d07-4050-96ed-11da6506909f","Type":"ContainerDied","Data":"163f62a4400c7e7d779ff3bce74d83bde7334d6c6bd018b3c497bbfbd07d0a43"} Oct 13 13:44:22 crc kubenswrapper[4684]: I1013 13:44:22.060923 4684 scope.go:117] "RemoveContainer" containerID="8fd5ed02de598997d941f0da771abb02011564b38974149751c37e9cfc2086c9" Oct 13 13:44:22 crc kubenswrapper[4684]: I1013 13:44:22.087607 4684 scope.go:117] "RemoveContainer" containerID="e42889ee993f76d5b99e847dab1f9675a7fc763798255add6170182a3e96ac83" Oct 13 13:44:22 crc kubenswrapper[4684]: I1013 13:44:22.117409 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2shfz"] Oct 13 13:44:22 crc kubenswrapper[4684]: I1013 13:44:22.127750 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2shfz"] Oct 13 13:44:22 crc kubenswrapper[4684]: I1013 13:44:22.138974 4684 scope.go:117] "RemoveContainer" containerID="056d447005281e6a9bed8ddc4ddc900ee0ae875b6bf9f00609e35f6f73adcc5d" Oct 13 13:44:22 crc kubenswrapper[4684]: I1013 13:44:22.166688 4684 scope.go:117] "RemoveContainer" containerID="8fd5ed02de598997d941f0da771abb02011564b38974149751c37e9cfc2086c9" Oct 13 13:44:22 crc kubenswrapper[4684]: E1013 13:44:22.167341 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fd5ed02de598997d941f0da771abb02011564b38974149751c37e9cfc2086c9\": container with ID starting with 8fd5ed02de598997d941f0da771abb02011564b38974149751c37e9cfc2086c9 not found: ID does not exist" containerID="8fd5ed02de598997d941f0da771abb02011564b38974149751c37e9cfc2086c9" Oct 13 13:44:22 crc kubenswrapper[4684]: I1013 13:44:22.167368 4684 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fd5ed02de598997d941f0da771abb02011564b38974149751c37e9cfc2086c9"} err="failed to get container status \"8fd5ed02de598997d941f0da771abb02011564b38974149751c37e9cfc2086c9\": rpc error: code = NotFound desc = could not find container \"8fd5ed02de598997d941f0da771abb02011564b38974149751c37e9cfc2086c9\": container with ID starting with 8fd5ed02de598997d941f0da771abb02011564b38974149751c37e9cfc2086c9 not found: ID does not exist"
Oct 13 13:44:22 crc kubenswrapper[4684]: I1013 13:44:22.167392 4684 scope.go:117] "RemoveContainer" containerID="e42889ee993f76d5b99e847dab1f9675a7fc763798255add6170182a3e96ac83"
Oct 13 13:44:22 crc kubenswrapper[4684]: E1013 13:44:22.168957 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e42889ee993f76d5b99e847dab1f9675a7fc763798255add6170182a3e96ac83\": container with ID starting with e42889ee993f76d5b99e847dab1f9675a7fc763798255add6170182a3e96ac83 not found: ID does not exist" containerID="e42889ee993f76d5b99e847dab1f9675a7fc763798255add6170182a3e96ac83"
Oct 13 13:44:22 crc kubenswrapper[4684]: I1013 13:44:22.168999 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e42889ee993f76d5b99e847dab1f9675a7fc763798255add6170182a3e96ac83"} err="failed to get container status \"e42889ee993f76d5b99e847dab1f9675a7fc763798255add6170182a3e96ac83\": rpc error: code = NotFound desc = could not find container \"e42889ee993f76d5b99e847dab1f9675a7fc763798255add6170182a3e96ac83\": container with ID starting with e42889ee993f76d5b99e847dab1f9675a7fc763798255add6170182a3e96ac83 not found: ID does not exist"
Oct 13 13:44:22 crc kubenswrapper[4684]: I1013 13:44:22.169026 4684 scope.go:117] "RemoveContainer" containerID="056d447005281e6a9bed8ddc4ddc900ee0ae875b6bf9f00609e35f6f73adcc5d"
Oct 13 13:44:22 crc kubenswrapper[4684]: E1013 13:44:22.169355 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"056d447005281e6a9bed8ddc4ddc900ee0ae875b6bf9f00609e35f6f73adcc5d\": container with ID starting with 056d447005281e6a9bed8ddc4ddc900ee0ae875b6bf9f00609e35f6f73adcc5d not found: ID does not exist" containerID="056d447005281e6a9bed8ddc4ddc900ee0ae875b6bf9f00609e35f6f73adcc5d"
Oct 13 13:44:22 crc kubenswrapper[4684]: I1013 13:44:22.169403 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"056d447005281e6a9bed8ddc4ddc900ee0ae875b6bf9f00609e35f6f73adcc5d"} err="failed to get container status \"056d447005281e6a9bed8ddc4ddc900ee0ae875b6bf9f00609e35f6f73adcc5d\": rpc error: code = NotFound desc = could not find container \"056d447005281e6a9bed8ddc4ddc900ee0ae875b6bf9f00609e35f6f73adcc5d\": container with ID starting with 056d447005281e6a9bed8ddc4ddc900ee0ae875b6bf9f00609e35f6f73adcc5d not found: ID does not exist"
Oct 13 13:44:22 crc kubenswrapper[4684]: I1013 13:44:22.363921 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6347f664-9d07-4050-96ed-11da6506909f" path="/var/lib/kubelet/pods/6347f664-9d07-4050-96ed-11da6506909f/volumes"
Oct 13 13:44:27 crc kubenswrapper[4684]: I1013 13:44:27.351185 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d"
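
The rpc "NotFound" errors above are benign: the registry-server, extract-content, and extract-utilities containers were already removed along with the pod at 13:44:21, so the follow-up RemoveContainer calls have nothing left to delete. A CRI client can classify this case by gRPC status code instead of string-matching the message; a minimal sketch (the helper name is illustrative, not kubelet's actual cleanup path):

    package main

    import (
    	"fmt"

    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    // removeContainer stands in for a CRI ContainerStatus/RemoveContainer
    // round-trip; here it always fails the way cri-o does for a missing ID.
    func removeContainer(id string) error {
    	return status.Errorf(codes.NotFound, "could not find container %q", id)
    }

    func main() {
    	err := removeContainer("8fd5ed02de598997d941f0da771abb02011564b38974149751c37e9cfc2086c9")
    	if status.Code(err) == codes.NotFound {
    		// Already gone: safe to treat the delete as a no-op.
    		fmt.Println("container already removed:", err)
    		return
    	}
    	if err != nil {
    		fmt.Println("real deletion failure:", err)
    	}
    }
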
Oct 13 13:44:27 crc kubenswrapper[4684]: E1013 13:44:27.352242 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:44:40 crc kubenswrapper[4684]: I1013 13:44:40.351599 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d"
Oct 13 13:44:40 crc kubenswrapper[4684]: E1013 13:44:40.352956 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:44:52 crc kubenswrapper[4684]: I1013 13:44:52.351036 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d"
Oct 13 13:44:52 crc kubenswrapper[4684]: E1013 13:44:52.351721 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.142679 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b"]
Oct 13 13:45:00 crc kubenswrapper[4684]: E1013 13:45:00.143615 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6347f664-9d07-4050-96ed-11da6506909f" containerName="extract-utilities"
Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.143631 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="6347f664-9d07-4050-96ed-11da6506909f" containerName="extract-utilities"
Oct 13 13:45:00 crc kubenswrapper[4684]: E1013 13:45:00.143657 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6347f664-9d07-4050-96ed-11da6506909f" containerName="registry-server"
Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.143665 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="6347f664-9d07-4050-96ed-11da6506909f" containerName="registry-server"
Oct 13 13:45:00 crc kubenswrapper[4684]: E1013 13:45:00.143686 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6347f664-9d07-4050-96ed-11da6506909f" containerName="extract-content"
Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.143695 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="6347f664-9d07-4050-96ed-11da6506909f" containerName="extract-content"
Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.143942 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="6347f664-9d07-4050-96ed-11da6506909f" containerName="registry-server"
Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.144738 4684 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b" Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.148620 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.148854 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.153068 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b"] Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.265321 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whkwh\" (UniqueName: \"kubernetes.io/projected/641c0b65-bee3-4545-8e1f-4af4ec977e1f-kube-api-access-whkwh\") pod \"collect-profiles-29339385-7q54b\" (UID: \"641c0b65-bee3-4545-8e1f-4af4ec977e1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b" Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.265396 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/641c0b65-bee3-4545-8e1f-4af4ec977e1f-config-volume\") pod \"collect-profiles-29339385-7q54b\" (UID: \"641c0b65-bee3-4545-8e1f-4af4ec977e1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b" Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.265441 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/641c0b65-bee3-4545-8e1f-4af4ec977e1f-secret-volume\") pod \"collect-profiles-29339385-7q54b\" (UID: \"641c0b65-bee3-4545-8e1f-4af4ec977e1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b" Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.367387 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whkwh\" (UniqueName: \"kubernetes.io/projected/641c0b65-bee3-4545-8e1f-4af4ec977e1f-kube-api-access-whkwh\") pod \"collect-profiles-29339385-7q54b\" (UID: \"641c0b65-bee3-4545-8e1f-4af4ec977e1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b" Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.367449 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/641c0b65-bee3-4545-8e1f-4af4ec977e1f-config-volume\") pod \"collect-profiles-29339385-7q54b\" (UID: \"641c0b65-bee3-4545-8e1f-4af4ec977e1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b" Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.367513 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/641c0b65-bee3-4545-8e1f-4af4ec977e1f-secret-volume\") pod \"collect-profiles-29339385-7q54b\" (UID: \"641c0b65-bee3-4545-8e1f-4af4ec977e1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b" Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.368972 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/641c0b65-bee3-4545-8e1f-4af4ec977e1f-config-volume\") pod 
\"collect-profiles-29339385-7q54b\" (UID: \"641c0b65-bee3-4545-8e1f-4af4ec977e1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b" Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.373854 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/641c0b65-bee3-4545-8e1f-4af4ec977e1f-secret-volume\") pod \"collect-profiles-29339385-7q54b\" (UID: \"641c0b65-bee3-4545-8e1f-4af4ec977e1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b" Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.383669 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whkwh\" (UniqueName: \"kubernetes.io/projected/641c0b65-bee3-4545-8e1f-4af4ec977e1f-kube-api-access-whkwh\") pod \"collect-profiles-29339385-7q54b\" (UID: \"641c0b65-bee3-4545-8e1f-4af4ec977e1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b" Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.468277 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b" Oct 13 13:45:00 crc kubenswrapper[4684]: I1013 13:45:00.908168 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b"] Oct 13 13:45:01 crc kubenswrapper[4684]: I1013 13:45:01.473747 4684 generic.go:334] "Generic (PLEG): container finished" podID="641c0b65-bee3-4545-8e1f-4af4ec977e1f" containerID="a13ec651c8494541e0bb3890aa4772af417e3243f1bf0ec34e212a97b2e1731a" exitCode=0 Oct 13 13:45:01 crc kubenswrapper[4684]: I1013 13:45:01.473794 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b" event={"ID":"641c0b65-bee3-4545-8e1f-4af4ec977e1f","Type":"ContainerDied","Data":"a13ec651c8494541e0bb3890aa4772af417e3243f1bf0ec34e212a97b2e1731a"} Oct 13 13:45:01 crc kubenswrapper[4684]: I1013 13:45:01.473826 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b" event={"ID":"641c0b65-bee3-4545-8e1f-4af4ec977e1f","Type":"ContainerStarted","Data":"dda5bef509a7d3d895e9f91c468fddd7c598d9d736cccb2208b14a6d6acaa494"} Oct 13 13:45:02 crc kubenswrapper[4684]: I1013 13:45:02.791091 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b" Oct 13 13:45:02 crc kubenswrapper[4684]: I1013 13:45:02.818892 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whkwh\" (UniqueName: \"kubernetes.io/projected/641c0b65-bee3-4545-8e1f-4af4ec977e1f-kube-api-access-whkwh\") pod \"641c0b65-bee3-4545-8e1f-4af4ec977e1f\" (UID: \"641c0b65-bee3-4545-8e1f-4af4ec977e1f\") " Oct 13 13:45:02 crc kubenswrapper[4684]: I1013 13:45:02.819042 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/641c0b65-bee3-4545-8e1f-4af4ec977e1f-config-volume\") pod \"641c0b65-bee3-4545-8e1f-4af4ec977e1f\" (UID: \"641c0b65-bee3-4545-8e1f-4af4ec977e1f\") " Oct 13 13:45:02 crc kubenswrapper[4684]: I1013 13:45:02.819161 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/641c0b65-bee3-4545-8e1f-4af4ec977e1f-secret-volume\") pod \"641c0b65-bee3-4545-8e1f-4af4ec977e1f\" (UID: \"641c0b65-bee3-4545-8e1f-4af4ec977e1f\") " Oct 13 13:45:02 crc kubenswrapper[4684]: I1013 13:45:02.822087 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/641c0b65-bee3-4545-8e1f-4af4ec977e1f-config-volume" (OuterVolumeSpecName: "config-volume") pod "641c0b65-bee3-4545-8e1f-4af4ec977e1f" (UID: "641c0b65-bee3-4545-8e1f-4af4ec977e1f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:45:02 crc kubenswrapper[4684]: I1013 13:45:02.826305 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/641c0b65-bee3-4545-8e1f-4af4ec977e1f-kube-api-access-whkwh" (OuterVolumeSpecName: "kube-api-access-whkwh") pod "641c0b65-bee3-4545-8e1f-4af4ec977e1f" (UID: "641c0b65-bee3-4545-8e1f-4af4ec977e1f"). InnerVolumeSpecName "kube-api-access-whkwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:45:02 crc kubenswrapper[4684]: I1013 13:45:02.826333 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/641c0b65-bee3-4545-8e1f-4af4ec977e1f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "641c0b65-bee3-4545-8e1f-4af4ec977e1f" (UID: "641c0b65-bee3-4545-8e1f-4af4ec977e1f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:45:02 crc kubenswrapper[4684]: I1013 13:45:02.921417 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whkwh\" (UniqueName: \"kubernetes.io/projected/641c0b65-bee3-4545-8e1f-4af4ec977e1f-kube-api-access-whkwh\") on node \"crc\" DevicePath \"\""
Oct 13 13:45:02 crc kubenswrapper[4684]: I1013 13:45:02.921466 4684 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/641c0b65-bee3-4545-8e1f-4af4ec977e1f-config-volume\") on node \"crc\" DevicePath \"\""
Oct 13 13:45:02 crc kubenswrapper[4684]: I1013 13:45:02.921477 4684 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/641c0b65-bee3-4545-8e1f-4af4ec977e1f-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 13 13:45:03 crc kubenswrapper[4684]: I1013 13:45:03.492687 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b" event={"ID":"641c0b65-bee3-4545-8e1f-4af4ec977e1f","Type":"ContainerDied","Data":"dda5bef509a7d3d895e9f91c468fddd7c598d9d736cccb2208b14a6d6acaa494"}
Oct 13 13:45:03 crc kubenswrapper[4684]: I1013 13:45:03.493072 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dda5bef509a7d3d895e9f91c468fddd7c598d9d736cccb2208b14a6d6acaa494"
Oct 13 13:45:03 crc kubenswrapper[4684]: I1013 13:45:03.492977 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339385-7q54b"
Oct 13 13:45:03 crc kubenswrapper[4684]: I1013 13:45:03.868268 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft"]
Oct 13 13:45:03 crc kubenswrapper[4684]: I1013 13:45:03.878330 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339340-5jxft"]
Oct 13 13:45:04 crc kubenswrapper[4684]: I1013 13:45:04.351412 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d"
Oct 13 13:45:04 crc kubenswrapper[4684]: E1013 13:45:04.351763 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:45:04 crc kubenswrapper[4684]: I1013 13:45:04.367849 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a4e53b7-9ccc-4cf5-9359-7be4899e1311" path="/var/lib/kubelet/pods/6a4e53b7-9ccc-4cf5-9359-7be4899e1311/volumes"
Oct 13 13:45:18 crc kubenswrapper[4684]: I1013 13:45:18.350871 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d"
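
The numeric suffix in the collect-profiles job names encodes the schedule time: the CronJob controller names each Job <cronjob>-<scheduled time in minutes since the Unix epoch>, so 29339385 * 60 = 1760363100 = 2025-10-13T13:45:00Z, exactly the SyncLoop ADD time above. collect-profiles-29339340-5jxft, deleted at 13:45:03, is the run from three 15-minute ticks (45 minutes) earlier aging out of the job history (OLM's collect-profiles CronJob runs on a 15-minute schedule). A quick Go check:

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	// Job-name suffixes from the log, interpreted as minutes since epoch.
    	const newRun, oldRun = 29339385, 29339340
    	fmt.Println(time.Unix(newRun*60, 0).UTC()) // 2025-10-13 13:45:00 +0000 UTC
    	fmt.Println(time.Unix(oldRun*60, 0).UTC()) // 2025-10-13 13:00:00 +0000 UTC
    }
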
Oct 13 13:45:18 crc kubenswrapper[4684]: E1013 13:45:18.352928 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:45:33 crc kubenswrapper[4684]: I1013 13:45:33.351480 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d"
Oct 13 13:45:33 crc kubenswrapper[4684]: E1013 13:45:33.352195 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:45:33 crc kubenswrapper[4684]: I1013 13:45:33.655211 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-86l7p"]
Oct 13 13:45:33 crc kubenswrapper[4684]: E1013 13:45:33.656168 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="641c0b65-bee3-4545-8e1f-4af4ec977e1f" containerName="collect-profiles"
Oct 13 13:45:33 crc kubenswrapper[4684]: I1013 13:45:33.656330 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="641c0b65-bee3-4545-8e1f-4af4ec977e1f" containerName="collect-profiles"
Oct 13 13:45:33 crc kubenswrapper[4684]: I1013 13:45:33.656829 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="641c0b65-bee3-4545-8e1f-4af4ec977e1f" containerName="collect-profiles"
Oct 13 13:45:33 crc kubenswrapper[4684]: I1013 13:45:33.659260 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-86l7p"
Oct 13 13:45:33 crc kubenswrapper[4684]: I1013 13:45:33.665731 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-86l7p"]
Oct 13 13:45:33 crc kubenswrapper[4684]: I1013 13:45:33.732934 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f97jt\" (UniqueName: \"kubernetes.io/projected/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-kube-api-access-f97jt\") pod \"redhat-operators-86l7p\" (UID: \"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f\") " pod="openshift-marketplace/redhat-operators-86l7p"
Oct 13 13:45:33 crc kubenswrapper[4684]: I1013 13:45:33.733405 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-catalog-content\") pod \"redhat-operators-86l7p\" (UID: \"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f\") " pod="openshift-marketplace/redhat-operators-86l7p"
Oct 13 13:45:33 crc kubenswrapper[4684]: I1013 13:45:33.733463 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-utilities\") pod \"redhat-operators-86l7p\" (UID: \"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f\") " pod="openshift-marketplace/redhat-operators-86l7p"
Oct 13 13:45:33 crc kubenswrapper[4684]: I1013 13:45:33.838846 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f97jt\" (UniqueName: \"kubernetes.io/projected/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-kube-api-access-f97jt\") pod \"redhat-operators-86l7p\" (UID: \"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f\") " pod="openshift-marketplace/redhat-operators-86l7p" Oct
13 13:45:33 crc kubenswrapper[4684]: I1013 13:45:33.838966 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-catalog-content\") pod \"redhat-operators-86l7p\" (UID: \"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f\") " pod="openshift-marketplace/redhat-operators-86l7p" Oct 13 13:45:33 crc kubenswrapper[4684]: I1013 13:45:33.839021 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-utilities\") pod \"redhat-operators-86l7p\" (UID: \"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f\") " pod="openshift-marketplace/redhat-operators-86l7p" Oct 13 13:45:33 crc kubenswrapper[4684]: I1013 13:45:33.839782 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-utilities\") pod \"redhat-operators-86l7p\" (UID: \"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f\") " pod="openshift-marketplace/redhat-operators-86l7p" Oct 13 13:45:33 crc kubenswrapper[4684]: I1013 13:45:33.840091 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-catalog-content\") pod \"redhat-operators-86l7p\" (UID: \"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f\") " pod="openshift-marketplace/redhat-operators-86l7p" Oct 13 13:45:33 crc kubenswrapper[4684]: I1013 13:45:33.861703 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f97jt\" (UniqueName: \"kubernetes.io/projected/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-kube-api-access-f97jt\") pod \"redhat-operators-86l7p\" (UID: \"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f\") " pod="openshift-marketplace/redhat-operators-86l7p" Oct 13 13:45:33 crc kubenswrapper[4684]: I1013 13:45:33.994678 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-86l7p"
Oct 13 13:45:34 crc kubenswrapper[4684]: I1013 13:45:34.459981 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-86l7p"]
Oct 13 13:45:34 crc kubenswrapper[4684]: I1013 13:45:34.777598 4684 generic.go:334] "Generic (PLEG): container finished" podID="e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f" containerID="1067d94d7a6e0e4347686fc912b45fb6b841ebd73d23edbdc85320a0ed28f928" exitCode=0
Oct 13 13:45:34 crc kubenswrapper[4684]: I1013 13:45:34.777704 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86l7p" event={"ID":"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f","Type":"ContainerDied","Data":"1067d94d7a6e0e4347686fc912b45fb6b841ebd73d23edbdc85320a0ed28f928"}
Oct 13 13:45:34 crc kubenswrapper[4684]: I1013 13:45:34.777939 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86l7p" event={"ID":"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f","Type":"ContainerStarted","Data":"593d284c8c771b09cd24429b0165c935923758f9464c0c0456a1cde692523c49"}
Oct 13 13:45:36 crc kubenswrapper[4684]: I1013 13:45:36.806026 4684 generic.go:334] "Generic (PLEG): container finished" podID="e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f" containerID="31567b9720a8f5498f47c48a56a1c8ca00411ff3a20a9f10907179f90cc0e0de" exitCode=0
Oct 13 13:45:36 crc kubenswrapper[4684]: I1013 13:45:36.806087 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86l7p" event={"ID":"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f","Type":"ContainerDied","Data":"31567b9720a8f5498f47c48a56a1c8ca00411ff3a20a9f10907179f90cc0e0de"}
Oct 13 13:45:37 crc kubenswrapper[4684]: I1013 13:45:37.831364 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86l7p" event={"ID":"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f","Type":"ContainerStarted","Data":"23ab06c922a864b64a55b200a5bc55e6524d58f5852e183d86168f6c358d2424"}
Oct 13 13:45:37 crc kubenswrapper[4684]: I1013 13:45:37.867482 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-86l7p" podStartSLOduration=2.407694646 podStartE2EDuration="4.867459283s" podCreationTimestamp="2025-10-13 13:45:33 +0000 UTC" firstStartedPulling="2025-10-13 13:45:34.779226734 +0000 UTC m=+2289.346610804" lastFinishedPulling="2025-10-13 13:45:37.238991351 +0000 UTC m=+2291.806375441" observedRunningTime="2025-10-13 13:45:37.858128802 +0000 UTC m=+2292.425512872" watchObservedRunningTime="2025-10-13 13:45:37.867459283 +0000 UTC m=+2292.434843373"
Oct 13 13:45:38 crc kubenswrapper[4684]: I1013 13:45:38.844419 4684 generic.go:334] "Generic (PLEG): container finished" podID="4281de73-4320-444b-9d71-877c9cf226a0" containerID="0e505e3b3c9776989e2c19d630aa833b6fcd347cbd186cc53cd1217a5f2b900a" exitCode=0
Oct 13 13:45:38 crc kubenswrapper[4684]: I1013 13:45:38.845990 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" event={"ID":"4281de73-4320-444b-9d71-877c9cf226a0","Type":"ContainerDied","Data":"0e505e3b3c9776989e2c19d630aa833b6fcd347cbd186cc53cd1217a5f2b900a"}
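
The redhat-operators-86l7p events retrace the same catalog-pod pattern as redhat-marketplace-2shfz above: two init containers (extract-utilities, then extract-content, each ending in ContainerDied with exitCode=0) populate a pair of emptyDir volumes, and the long-running registry-server container then serves the extracted catalog. Because emptyDir contents live and die with the pod, every fresh catalog pod repeats the extraction, and deletion produces the UnmountVolume/TearDown lines seen at 13:44:21. A rough reconstruction of that volume shape with the Kubernetes API types (hand-written from the mount logs above, not the actual CatalogSource pod template):

    package main

    import (
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    )

    func main() {
    	// Scratch volumes the extract-* init containers fill and
    	// registry-server reads; both vanish with the pod sandbox.
    	vols := []corev1.Volume{
    		{Name: "utilities", VolumeSource: corev1.VolumeSource{
    			EmptyDir: &corev1.EmptyDirVolumeSource{},
    		}},
    		{Name: "catalog-content", VolumeSource: corev1.VolumeSource{
    			EmptyDir: &corev1.EmptyDirVolumeSource{},
    		}},
    	}
    	for _, v := range vols {
    		fmt.Printf("%s: emptyDir=%v\n", v.Name, v.VolumeSource.EmptyDir != nil)
    	}
    }
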
Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.278377 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r"
Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.371230 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-inventory\") pod \"4281de73-4320-444b-9d71-877c9cf226a0\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") "
Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.371437 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zskd\" (UniqueName: \"kubernetes.io/projected/4281de73-4320-444b-9d71-877c9cf226a0-kube-api-access-8zskd\") pod \"4281de73-4320-444b-9d71-877c9cf226a0\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") "
Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.371518 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-ssh-key\") pod \"4281de73-4320-444b-9d71-877c9cf226a0\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") "
Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.371569 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-libvirt-secret-0\") pod \"4281de73-4320-444b-9d71-877c9cf226a0\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") "
Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.371715 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-libvirt-combined-ca-bundle\") pod \"4281de73-4320-444b-9d71-877c9cf226a0\" (UID: \"4281de73-4320-444b-9d71-877c9cf226a0\") "
Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.379004 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "4281de73-4320-444b-9d71-877c9cf226a0" (UID: "4281de73-4320-444b-9d71-877c9cf226a0"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.393994 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4281de73-4320-444b-9d71-877c9cf226a0-kube-api-access-8zskd" (OuterVolumeSpecName: "kube-api-access-8zskd") pod "4281de73-4320-444b-9d71-877c9cf226a0" (UID: "4281de73-4320-444b-9d71-877c9cf226a0"). InnerVolumeSpecName "kube-api-access-8zskd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.401361 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-inventory" (OuterVolumeSpecName: "inventory") pod "4281de73-4320-444b-9d71-877c9cf226a0" (UID: "4281de73-4320-444b-9d71-877c9cf226a0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.407461 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4281de73-4320-444b-9d71-877c9cf226a0" (UID: "4281de73-4320-444b-9d71-877c9cf226a0").
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.422739 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "4281de73-4320-444b-9d71-877c9cf226a0" (UID: "4281de73-4320-444b-9d71-877c9cf226a0"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.476059 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.476095 4684 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.476108 4684 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.476119 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4281de73-4320-444b-9d71-877c9cf226a0-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.476128 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zskd\" (UniqueName: \"kubernetes.io/projected/4281de73-4320-444b-9d71-877c9cf226a0-kube-api-access-8zskd\") on node \"crc\" DevicePath \"\"" Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.864068 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" event={"ID":"4281de73-4320-444b-9d71-877c9cf226a0","Type":"ContainerDied","Data":"4f632e8880dad60f59fe65217384d87caba533a9d8de3b82d245986fc7319485"} Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.864123 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f632e8880dad60f59fe65217384d87caba533a9d8de3b82d245986fc7319485" Oct 13 13:45:40 crc kubenswrapper[4684]: I1013 13:45:40.864170 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cj82r" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.007351 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2"] Oct 13 13:45:41 crc kubenswrapper[4684]: E1013 13:45:41.007723 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4281de73-4320-444b-9d71-877c9cf226a0" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.007741 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="4281de73-4320-444b-9d71-877c9cf226a0" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.007940 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="4281de73-4320-444b-9d71-877c9cf226a0" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.008515 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.011633 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.011818 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.012030 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.012184 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.013298 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.013882 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.013988 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.021223 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2"] Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.085822 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.085873 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxbjp\" (UniqueName: \"kubernetes.io/projected/777b6ddf-59c7-4afc-841b-098fe5353aea-kube-api-access-cxbjp\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.086002 4684 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.086099 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.086229 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.086366 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.086441 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.086481 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.086525 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.188573 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 
13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.188630 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.188682 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.188731 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.188762 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxbjp\" (UniqueName: \"kubernetes.io/projected/777b6ddf-59c7-4afc-841b-098fe5353aea-kube-api-access-cxbjp\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.188795 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.188846 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.188954 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.189002 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.189815 4684 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.192478 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.192810 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.193334 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.193520 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.193822 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.198307 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.200332 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.205948 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxbjp\" (UniqueName: 
\"kubernetes.io/projected/777b6ddf-59c7-4afc-841b-098fe5353aea-kube-api-access-cxbjp\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxhq2\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.329867 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:45:41 crc kubenswrapper[4684]: I1013 13:45:41.881689 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2"] Oct 13 13:45:41 crc kubenswrapper[4684]: W1013 13:45:41.889195 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod777b6ddf_59c7_4afc_841b_098fe5353aea.slice/crio-64d98f1fb9e61365cfb1c1df19bf1dbd9d22f5b51d2383910847828425df98e7 WatchSource:0}: Error finding container 64d98f1fb9e61365cfb1c1df19bf1dbd9d22f5b51d2383910847828425df98e7: Status 404 returned error can't find the container with id 64d98f1fb9e61365cfb1c1df19bf1dbd9d22f5b51d2383910847828425df98e7 Oct 13 13:45:42 crc kubenswrapper[4684]: I1013 13:45:42.884960 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" event={"ID":"777b6ddf-59c7-4afc-841b-098fe5353aea","Type":"ContainerStarted","Data":"d7e802bbd3b3059e6ff1ca182e776b48028de3e8956a9760f14f6b6163b8d972"} Oct 13 13:45:42 crc kubenswrapper[4684]: I1013 13:45:42.885333 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" event={"ID":"777b6ddf-59c7-4afc-841b-098fe5353aea","Type":"ContainerStarted","Data":"64d98f1fb9e61365cfb1c1df19bf1dbd9d22f5b51d2383910847828425df98e7"} Oct 13 13:45:42 crc kubenswrapper[4684]: I1013 13:45:42.907670 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" podStartSLOduration=2.251592901 podStartE2EDuration="2.907648173s" podCreationTimestamp="2025-10-13 13:45:40 +0000 UTC" firstStartedPulling="2025-10-13 13:45:41.891437782 +0000 UTC m=+2296.458821852" lastFinishedPulling="2025-10-13 13:45:42.547493054 +0000 UTC m=+2297.114877124" observedRunningTime="2025-10-13 13:45:42.900579092 +0000 UTC m=+2297.467963162" watchObservedRunningTime="2025-10-13 13:45:42.907648173 +0000 UTC m=+2297.475032233" Oct 13 13:45:43 crc kubenswrapper[4684]: I1013 13:45:43.995834 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-86l7p" Oct 13 13:45:43 crc kubenswrapper[4684]: I1013 13:45:43.996224 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-86l7p" Oct 13 13:45:44 crc kubenswrapper[4684]: I1013 13:45:44.055401 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-86l7p" Oct 13 13:45:44 crc kubenswrapper[4684]: I1013 13:45:44.990004 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-86l7p" Oct 13 13:45:45 crc kubenswrapper[4684]: I1013 13:45:45.046549 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-86l7p"] Oct 13 13:45:46 crc kubenswrapper[4684]: I1013 13:45:46.363266 4684 scope.go:117] "RemoveContainer" 
containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:45:46 crc kubenswrapper[4684]: E1013 13:45:46.363747 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:45:46 crc kubenswrapper[4684]: I1013 13:45:46.921198 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-86l7p" podUID="e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f" containerName="registry-server" containerID="cri-o://23ab06c922a864b64a55b200a5bc55e6524d58f5852e183d86168f6c358d2424" gracePeriod=2 Oct 13 13:45:47 crc kubenswrapper[4684]: I1013 13:45:47.932196 4684 generic.go:334] "Generic (PLEG): container finished" podID="e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f" containerID="23ab06c922a864b64a55b200a5bc55e6524d58f5852e183d86168f6c358d2424" exitCode=0 Oct 13 13:45:47 crc kubenswrapper[4684]: I1013 13:45:47.932263 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86l7p" event={"ID":"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f","Type":"ContainerDied","Data":"23ab06c922a864b64a55b200a5bc55e6524d58f5852e183d86168f6c358d2424"} Oct 13 13:45:48 crc kubenswrapper[4684]: I1013 13:45:48.459680 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-86l7p" Oct 13 13:45:48 crc kubenswrapper[4684]: I1013 13:45:48.530859 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-catalog-content\") pod \"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f\" (UID: \"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f\") " Oct 13 13:45:48 crc kubenswrapper[4684]: I1013 13:45:48.530939 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f97jt\" (UniqueName: \"kubernetes.io/projected/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-kube-api-access-f97jt\") pod \"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f\" (UID: \"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f\") " Oct 13 13:45:48 crc kubenswrapper[4684]: I1013 13:45:48.531028 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-utilities\") pod \"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f\" (UID: \"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f\") " Oct 13 13:45:48 crc kubenswrapper[4684]: I1013 13:45:48.532246 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-utilities" (OuterVolumeSpecName: "utilities") pod "e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f" (UID: "e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:45:48 crc kubenswrapper[4684]: I1013 13:45:48.539672 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-kube-api-access-f97jt" (OuterVolumeSpecName: "kube-api-access-f97jt") pod "e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f" (UID: "e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f"). InnerVolumeSpecName "kube-api-access-f97jt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:45:48 crc kubenswrapper[4684]: I1013 13:45:48.616536 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f" (UID: "e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:45:48 crc kubenswrapper[4684]: I1013 13:45:48.633218 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:45:48 crc kubenswrapper[4684]: I1013 13:45:48.633262 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f97jt\" (UniqueName: \"kubernetes.io/projected/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-kube-api-access-f97jt\") on node \"crc\" DevicePath \"\"" Oct 13 13:45:48 crc kubenswrapper[4684]: I1013 13:45:48.633274 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:45:48 crc kubenswrapper[4684]: I1013 13:45:48.946786 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86l7p" event={"ID":"e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f","Type":"ContainerDied","Data":"593d284c8c771b09cd24429b0165c935923758f9464c0c0456a1cde692523c49"} Oct 13 13:45:48 crc kubenswrapper[4684]: I1013 13:45:48.946856 4684 scope.go:117] "RemoveContainer" containerID="23ab06c922a864b64a55b200a5bc55e6524d58f5852e183d86168f6c358d2424" Oct 13 13:45:48 crc kubenswrapper[4684]: I1013 13:45:48.946935 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-86l7p" Oct 13 13:45:48 crc kubenswrapper[4684]: I1013 13:45:48.975959 4684 scope.go:117] "RemoveContainer" containerID="31567b9720a8f5498f47c48a56a1c8ca00411ff3a20a9f10907179f90cc0e0de" Oct 13 13:45:49 crc kubenswrapper[4684]: I1013 13:45:49.003716 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-86l7p"] Oct 13 13:45:49 crc kubenswrapper[4684]: I1013 13:45:49.017967 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-86l7p"] Oct 13 13:45:49 crc kubenswrapper[4684]: I1013 13:45:49.029463 4684 scope.go:117] "RemoveContainer" containerID="1067d94d7a6e0e4347686fc912b45fb6b841ebd73d23edbdc85320a0ed28f928" Oct 13 13:45:50 crc kubenswrapper[4684]: I1013 13:45:50.371426 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f" path="/var/lib/kubelet/pods/e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f/volumes" Oct 13 13:45:50 crc kubenswrapper[4684]: I1013 13:45:50.381246 4684 scope.go:117] "RemoveContainer" containerID="50f34ee3e95f5ea7544d0cd5c20d73c6b8bedcf8da98054dd3d141b38c1cda0e" Oct 13 13:45:59 crc kubenswrapper[4684]: I1013 13:45:59.350731 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:45:59 crc kubenswrapper[4684]: E1013 13:45:59.351854 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:46:12 crc kubenswrapper[4684]: I1013 13:46:12.350551 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:46:12 crc kubenswrapper[4684]: E1013 13:46:12.351314 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:46:26 crc kubenswrapper[4684]: I1013 13:46:26.356805 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:46:26 crc kubenswrapper[4684]: E1013 13:46:26.357554 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:46:40 crc kubenswrapper[4684]: I1013 13:46:40.351111 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:46:40 crc kubenswrapper[4684]: E1013 13:46:40.352365 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:46:51 crc kubenswrapper[4684]: I1013 13:46:51.350593 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:46:51 crc kubenswrapper[4684]: E1013 13:46:51.351417 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:47:04 crc kubenswrapper[4684]: I1013 13:47:04.350570 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:47:04 crc kubenswrapper[4684]: E1013 13:47:04.351276 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:47:19 crc kubenswrapper[4684]: I1013 13:47:19.350625 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:47:19 crc kubenswrapper[4684]: E1013 13:47:19.351315 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:47:30 crc kubenswrapper[4684]: I1013 13:47:30.351549 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:47:30 crc kubenswrapper[4684]: E1013 13:47:30.352280 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:47:45 crc kubenswrapper[4684]: I1013 13:47:45.350923 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:47:45 crc kubenswrapper[4684]: E1013 13:47:45.351788 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:47:58 crc kubenswrapper[4684]: I1013 13:47:58.350977 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:47:58 crc kubenswrapper[4684]: E1013 13:47:58.352154 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:48:13 crc kubenswrapper[4684]: I1013 13:48:13.350682 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:48:13 crc kubenswrapper[4684]: E1013 13:48:13.351803 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:48:27 crc kubenswrapper[4684]: I1013 13:48:27.351635 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:48:27 crc kubenswrapper[4684]: E1013 13:48:27.352702 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:48:39 crc kubenswrapper[4684]: I1013 13:48:39.350742 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:48:39 crc kubenswrapper[4684]: E1013 13:48:39.351548 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:48:52 crc kubenswrapper[4684]: I1013 13:48:52.352337 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:48:52 crc kubenswrapper[4684]: E1013 13:48:52.353515 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" 
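The RemoveContainer / "Error syncing pod, skipping" pairs above are the sync loop re-evaluating machine-config-daemon-wns5s every few seconds and declining to restart it while its crash-loop back-off window is open. Kubelet's back-off for a repeatedly failing container is commonly described as doubling from a 10s initial delay up to the 5m0s cap quoted in the error text (treat those constants as an assumption); once the window elapses, the restart goes through, which is what the 13:49:03/13:49:04 entries below show. A sketch of that doubling schedule:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Assumed constants: initial 10s delay, doubled per failed restart,
	// capped at 5m -- the "back-off 5m0s" quoted in the entries above.
	backoff, limit := 10*time.Second, 5*time.Minute
	for restart := 1; restart <= 7; restart++ {
		fmt.Printf("restart %d: wait %v before the next attempt\n", restart, backoff)
		backoff *= 2
		if backoff > limit {
			backoff = limit
		}
	}
	// Prints 10s 20s 40s 1m20s 2m40s 5m0s 5m0s: a container that keeps
	// failing ends up pinned at the 5m0s message seen in this log.
}

Note that the log entries repeat roughly every 11 to 15 seconds regardless of the back-off: that cadence is the pod worker re-syncing, not the restart schedule itself.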
podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:48:55 crc kubenswrapper[4684]: I1013 13:48:55.860701 4684 generic.go:334] "Generic (PLEG): container finished" podID="777b6ddf-59c7-4afc-841b-098fe5353aea" containerID="d7e802bbd3b3059e6ff1ca182e776b48028de3e8956a9760f14f6b6163b8d972" exitCode=0 Oct 13 13:48:55 crc kubenswrapper[4684]: I1013 13:48:55.860784 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" event={"ID":"777b6ddf-59c7-4afc-841b-098fe5353aea","Type":"ContainerDied","Data":"d7e802bbd3b3059e6ff1ca182e776b48028de3e8956a9760f14f6b6163b8d972"} Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.274036 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.333374 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-cell1-compute-config-0\") pod \"777b6ddf-59c7-4afc-841b-098fe5353aea\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.333427 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-combined-ca-bundle\") pod \"777b6ddf-59c7-4afc-841b-098fe5353aea\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.333459 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxbjp\" (UniqueName: \"kubernetes.io/projected/777b6ddf-59c7-4afc-841b-098fe5353aea-kube-api-access-cxbjp\") pod \"777b6ddf-59c7-4afc-841b-098fe5353aea\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.333530 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-migration-ssh-key-0\") pod \"777b6ddf-59c7-4afc-841b-098fe5353aea\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.333554 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-inventory\") pod \"777b6ddf-59c7-4afc-841b-098fe5353aea\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.333614 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-migration-ssh-key-1\") pod \"777b6ddf-59c7-4afc-841b-098fe5353aea\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.333683 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-extra-config-0\") pod \"777b6ddf-59c7-4afc-841b-098fe5353aea\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.333709 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-cell1-compute-config-1\") pod \"777b6ddf-59c7-4afc-841b-098fe5353aea\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.333725 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-ssh-key\") pod \"777b6ddf-59c7-4afc-841b-098fe5353aea\" (UID: \"777b6ddf-59c7-4afc-841b-098fe5353aea\") " Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.339177 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "777b6ddf-59c7-4afc-841b-098fe5353aea" (UID: "777b6ddf-59c7-4afc-841b-098fe5353aea"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.339501 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/777b6ddf-59c7-4afc-841b-098fe5353aea-kube-api-access-cxbjp" (OuterVolumeSpecName: "kube-api-access-cxbjp") pod "777b6ddf-59c7-4afc-841b-098fe5353aea" (UID: "777b6ddf-59c7-4afc-841b-098fe5353aea"). InnerVolumeSpecName "kube-api-access-cxbjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.365405 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "777b6ddf-59c7-4afc-841b-098fe5353aea" (UID: "777b6ddf-59c7-4afc-841b-098fe5353aea"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.365467 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "777b6ddf-59c7-4afc-841b-098fe5353aea" (UID: "777b6ddf-59c7-4afc-841b-098fe5353aea"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.368470 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-inventory" (OuterVolumeSpecName: "inventory") pod "777b6ddf-59c7-4afc-841b-098fe5353aea" (UID: "777b6ddf-59c7-4afc-841b-098fe5353aea"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.369006 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "777b6ddf-59c7-4afc-841b-098fe5353aea" (UID: "777b6ddf-59c7-4afc-841b-098fe5353aea"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.370291 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "777b6ddf-59c7-4afc-841b-098fe5353aea" (UID: "777b6ddf-59c7-4afc-841b-098fe5353aea"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.372657 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "777b6ddf-59c7-4afc-841b-098fe5353aea" (UID: "777b6ddf-59c7-4afc-841b-098fe5353aea"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.375633 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "777b6ddf-59c7-4afc-841b-098fe5353aea" (UID: "777b6ddf-59c7-4afc-841b-098fe5353aea"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.435746 4684 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.435776 4684 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.435786 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.435797 4684 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.435806 4684 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.435814 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxbjp\" (UniqueName: \"kubernetes.io/projected/777b6ddf-59c7-4afc-841b-098fe5353aea-kube-api-access-cxbjp\") on node \"crc\" DevicePath \"\"" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.435823 4684 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.435831 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.435839 4684 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/777b6ddf-59c7-4afc-841b-098fe5353aea-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.881192 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" event={"ID":"777b6ddf-59c7-4afc-841b-098fe5353aea","Type":"ContainerDied","Data":"64d98f1fb9e61365cfb1c1df19bf1dbd9d22f5b51d2383910847828425df98e7"} Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.881239 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64d98f1fb9e61365cfb1c1df19bf1dbd9d22f5b51d2383910847828425df98e7" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.881279 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxhq2" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.977288 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck"] Oct 13 13:48:57 crc kubenswrapper[4684]: E1013 13:48:57.977714 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f" containerName="registry-server" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.977736 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f" containerName="registry-server" Oct 13 13:48:57 crc kubenswrapper[4684]: E1013 13:48:57.977760 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f" containerName="extract-utilities" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.977768 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f" containerName="extract-utilities" Oct 13 13:48:57 crc kubenswrapper[4684]: E1013 13:48:57.977787 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f" containerName="extract-content" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.977795 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f" containerName="extract-content" Oct 13 13:48:57 crc kubenswrapper[4684]: E1013 13:48:57.977817 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="777b6ddf-59c7-4afc-841b-098fe5353aea" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.977827 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="777b6ddf-59c7-4afc-841b-098fe5353aea" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.978018 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8ce4f59-b52c-4d9c-9adc-10ea34e8ff9f" containerName="registry-server" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.978040 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="777b6ddf-59c7-4afc-841b-098fe5353aea" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.978604 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.981244 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.981374 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.981628 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.981742 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dr69n" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.981979 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 13:48:57 crc kubenswrapper[4684]: I1013 13:48:57.996584 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck"] Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.045745 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.045793 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xd4v9\" (UniqueName: \"kubernetes.io/projected/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-kube-api-access-xd4v9\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.045831 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.045869 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.046007 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.046033 4684 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.046081 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.148021 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.148080 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.148166 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.148251 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.148288 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xd4v9\" (UniqueName: \"kubernetes.io/projected/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-kube-api-access-xd4v9\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.148333 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.148375 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.153214 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.153220 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.153278 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.153448 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.154255 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.168714 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.182317 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xd4v9\" (UniqueName: \"kubernetes.io/projected/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-kube-api-access-xd4v9\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-4zsck\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") 
" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.345384 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:48:58 crc kubenswrapper[4684]: I1013 13:48:58.905450 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck"] Oct 13 13:48:59 crc kubenswrapper[4684]: I1013 13:48:59.914582 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" event={"ID":"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879","Type":"ContainerStarted","Data":"237a61078cd4e32701d4d7a1a1be87d2566cdab08ffc24c21de1cbe2e7f8de8a"} Oct 13 13:48:59 crc kubenswrapper[4684]: I1013 13:48:59.915168 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" event={"ID":"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879","Type":"ContainerStarted","Data":"b791a71c5f0d76163e30b5ca3957e88499b33c7c8f5d47529c77c22acefa6f68"} Oct 13 13:48:59 crc kubenswrapper[4684]: I1013 13:48:59.931674 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" podStartSLOduration=2.49549656 podStartE2EDuration="2.931651824s" podCreationTimestamp="2025-10-13 13:48:57 +0000 UTC" firstStartedPulling="2025-10-13 13:48:58.915382733 +0000 UTC m=+2493.482766813" lastFinishedPulling="2025-10-13 13:48:59.351537967 +0000 UTC m=+2493.918922077" observedRunningTime="2025-10-13 13:48:59.928512786 +0000 UTC m=+2494.495896856" watchObservedRunningTime="2025-10-13 13:48:59.931651824 +0000 UTC m=+2494.499035904" Oct 13 13:49:03 crc kubenswrapper[4684]: I1013 13:49:03.350749 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:49:04 crc kubenswrapper[4684]: I1013 13:49:04.014444 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"0ff55f36bf6feb81cbd125fd720905f9a12f0eb524e393e1ac9b72518e5ce112"} Oct 13 13:49:38 crc kubenswrapper[4684]: I1013 13:49:38.223520 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/swift-proxy-56f9bb58f9-k8bsc" podUID="61ca624d-dfba-4a64-b08f-e96cc583a2b8" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Oct 13 13:51:23 crc kubenswrapper[4684]: I1013 13:51:23.059287 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dts2k"] Oct 13 13:51:23 crc kubenswrapper[4684]: I1013 13:51:23.061942 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:23 crc kubenswrapper[4684]: I1013 13:51:23.075003 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dts2k"] Oct 13 13:51:23 crc kubenswrapper[4684]: I1013 13:51:23.164715 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0757f0a3-9140-476e-ba9b-172960cbdd17-utilities\") pod \"certified-operators-dts2k\" (UID: \"0757f0a3-9140-476e-ba9b-172960cbdd17\") " pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:23 crc kubenswrapper[4684]: I1013 13:51:23.164958 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5xln\" (UniqueName: \"kubernetes.io/projected/0757f0a3-9140-476e-ba9b-172960cbdd17-kube-api-access-c5xln\") pod \"certified-operators-dts2k\" (UID: \"0757f0a3-9140-476e-ba9b-172960cbdd17\") " pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:23 crc kubenswrapper[4684]: I1013 13:51:23.165473 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0757f0a3-9140-476e-ba9b-172960cbdd17-catalog-content\") pod \"certified-operators-dts2k\" (UID: \"0757f0a3-9140-476e-ba9b-172960cbdd17\") " pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:23 crc kubenswrapper[4684]: I1013 13:51:23.266854 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0757f0a3-9140-476e-ba9b-172960cbdd17-catalog-content\") pod \"certified-operators-dts2k\" (UID: \"0757f0a3-9140-476e-ba9b-172960cbdd17\") " pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:23 crc kubenswrapper[4684]: I1013 13:51:23.267224 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0757f0a3-9140-476e-ba9b-172960cbdd17-utilities\") pod \"certified-operators-dts2k\" (UID: \"0757f0a3-9140-476e-ba9b-172960cbdd17\") " pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:23 crc kubenswrapper[4684]: I1013 13:51:23.267301 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5xln\" (UniqueName: \"kubernetes.io/projected/0757f0a3-9140-476e-ba9b-172960cbdd17-kube-api-access-c5xln\") pod \"certified-operators-dts2k\" (UID: \"0757f0a3-9140-476e-ba9b-172960cbdd17\") " pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:23 crc kubenswrapper[4684]: I1013 13:51:23.267547 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0757f0a3-9140-476e-ba9b-172960cbdd17-catalog-content\") pod \"certified-operators-dts2k\" (UID: \"0757f0a3-9140-476e-ba9b-172960cbdd17\") " pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:23 crc kubenswrapper[4684]: I1013 13:51:23.267667 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0757f0a3-9140-476e-ba9b-172960cbdd17-utilities\") pod \"certified-operators-dts2k\" (UID: \"0757f0a3-9140-476e-ba9b-172960cbdd17\") " pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:23 crc kubenswrapper[4684]: I1013 13:51:23.285679 4684 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-c5xln\" (UniqueName: \"kubernetes.io/projected/0757f0a3-9140-476e-ba9b-172960cbdd17-kube-api-access-c5xln\") pod \"certified-operators-dts2k\" (UID: \"0757f0a3-9140-476e-ba9b-172960cbdd17\") " pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:23 crc kubenswrapper[4684]: I1013 13:51:23.407016 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:24 crc kubenswrapper[4684]: I1013 13:51:24.018556 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dts2k"] Oct 13 13:51:24 crc kubenswrapper[4684]: I1013 13:51:24.441546 4684 generic.go:334] "Generic (PLEG): container finished" podID="0757f0a3-9140-476e-ba9b-172960cbdd17" containerID="43af2ce0fc0f3c47377c7758b004c2384f0cb3d4cfb46ae2e151b05402c2b839" exitCode=0 Oct 13 13:51:24 crc kubenswrapper[4684]: I1013 13:51:24.441660 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dts2k" event={"ID":"0757f0a3-9140-476e-ba9b-172960cbdd17","Type":"ContainerDied","Data":"43af2ce0fc0f3c47377c7758b004c2384f0cb3d4cfb46ae2e151b05402c2b839"} Oct 13 13:51:24 crc kubenswrapper[4684]: I1013 13:51:24.441925 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dts2k" event={"ID":"0757f0a3-9140-476e-ba9b-172960cbdd17","Type":"ContainerStarted","Data":"8f264ebfaf51923b3c374ae82b6405174093bf23bbef83f753fd551f7862c4bd"} Oct 13 13:51:24 crc kubenswrapper[4684]: I1013 13:51:24.443707 4684 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 13 13:51:26 crc kubenswrapper[4684]: I1013 13:51:26.462684 4684 generic.go:334] "Generic (PLEG): container finished" podID="5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879" containerID="237a61078cd4e32701d4d7a1a1be87d2566cdab08ffc24c21de1cbe2e7f8de8a" exitCode=0 Oct 13 13:51:26 crc kubenswrapper[4684]: I1013 13:51:26.462747 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" event={"ID":"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879","Type":"ContainerDied","Data":"237a61078cd4e32701d4d7a1a1be87d2566cdab08ffc24c21de1cbe2e7f8de8a"} Oct 13 13:51:26 crc kubenswrapper[4684]: I1013 13:51:26.468109 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dts2k" event={"ID":"0757f0a3-9140-476e-ba9b-172960cbdd17","Type":"ContainerStarted","Data":"af4574fa0075cb650702bd8ac0a4f64bb9fe0ea116293915c6fe77a5326a046c"} Oct 13 13:51:27 crc kubenswrapper[4684]: I1013 13:51:27.481861 4684 generic.go:334] "Generic (PLEG): container finished" podID="0757f0a3-9140-476e-ba9b-172960cbdd17" containerID="af4574fa0075cb650702bd8ac0a4f64bb9fe0ea116293915c6fe77a5326a046c" exitCode=0 Oct 13 13:51:27 crc kubenswrapper[4684]: I1013 13:51:27.481952 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dts2k" event={"ID":"0757f0a3-9140-476e-ba9b-172960cbdd17","Type":"ContainerDied","Data":"af4574fa0075cb650702bd8ac0a4f64bb9fe0ea116293915c6fe77a5326a046c"} Oct 13 13:51:27 crc kubenswrapper[4684]: I1013 13:51:27.482267 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dts2k" event={"ID":"0757f0a3-9140-476e-ba9b-172960cbdd17","Type":"ContainerStarted","Data":"44d380e04bfd5ca517f7affb60b47835fc1de47f08f79b906ade332294fb43ba"} 
Oct 13 13:51:27 crc kubenswrapper[4684]: I1013 13:51:27.506812 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dts2k" podStartSLOduration=1.9835377539999999 podStartE2EDuration="4.506786155s" podCreationTimestamp="2025-10-13 13:51:23 +0000 UTC" firstStartedPulling="2025-10-13 13:51:24.443438607 +0000 UTC m=+2639.010822697" lastFinishedPulling="2025-10-13 13:51:26.966687028 +0000 UTC m=+2641.534071098" observedRunningTime="2025-10-13 13:51:27.50219172 +0000 UTC m=+2642.069575810" watchObservedRunningTime="2025-10-13 13:51:27.506786155 +0000 UTC m=+2642.074170225" Oct 13 13:51:27 crc kubenswrapper[4684]: I1013 13:51:27.916882 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:51:27 crc kubenswrapper[4684]: I1013 13:51:27.995693 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-1\") pod \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " Oct 13 13:51:27 crc kubenswrapper[4684]: I1013 13:51:27.995799 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-2\") pod \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " Oct 13 13:51:27 crc kubenswrapper[4684]: I1013 13:51:27.995878 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-inventory\") pod \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " Oct 13 13:51:27 crc kubenswrapper[4684]: I1013 13:51:27.995935 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-telemetry-combined-ca-bundle\") pod \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " Oct 13 13:51:27 crc kubenswrapper[4684]: I1013 13:51:27.995997 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-0\") pod \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " Oct 13 13:51:27 crc kubenswrapper[4684]: I1013 13:51:27.996031 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xd4v9\" (UniqueName: \"kubernetes.io/projected/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-kube-api-access-xd4v9\") pod \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " Oct 13 13:51:27 crc kubenswrapper[4684]: I1013 13:51:27.996076 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ssh-key\") pod \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\" (UID: \"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879\") " Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.002159 4684 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879" (UID: "5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.002347 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-kube-api-access-xd4v9" (OuterVolumeSpecName: "kube-api-access-xd4v9") pod "5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879" (UID: "5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879"). InnerVolumeSpecName "kube-api-access-xd4v9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.025244 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879" (UID: "5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.025618 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879" (UID: "5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.039097 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879" (UID: "5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.039142 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-inventory" (OuterVolumeSpecName: "inventory") pod "5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879" (UID: "5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.043042 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879" (UID: "5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879"). InnerVolumeSpecName "ceilometer-compute-config-data-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.098847 4684 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.099136 4684 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.099227 4684 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.099301 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xd4v9\" (UniqueName: \"kubernetes.io/projected/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-kube-api-access-xd4v9\") on node \"crc\" DevicePath \"\"" Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.099357 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.099429 4684 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.099492 4684 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.497517 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" event={"ID":"5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879","Type":"ContainerDied","Data":"b791a71c5f0d76163e30b5ca3957e88499b33c7c8f5d47529c77c22acefa6f68"} Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.497582 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b791a71c5f0d76163e30b5ca3957e88499b33c7c8f5d47529c77c22acefa6f68" Oct 13 13:51:28 crc kubenswrapper[4684]: I1013 13:51:28.497547 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-4zsck" Oct 13 13:51:30 crc kubenswrapper[4684]: I1013 13:51:30.560741 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:51:30 crc kubenswrapper[4684]: I1013 13:51:30.561430 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:51:33 crc kubenswrapper[4684]: I1013 13:51:33.407728 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:33 crc kubenswrapper[4684]: I1013 13:51:33.410367 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:33 crc kubenswrapper[4684]: I1013 13:51:33.453186 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:33 crc kubenswrapper[4684]: I1013 13:51:33.601564 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:33 crc kubenswrapper[4684]: I1013 13:51:33.693557 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dts2k"] Oct 13 13:51:35 crc kubenswrapper[4684]: I1013 13:51:35.574386 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dts2k" podUID="0757f0a3-9140-476e-ba9b-172960cbdd17" containerName="registry-server" containerID="cri-o://44d380e04bfd5ca517f7affb60b47835fc1de47f08f79b906ade332294fb43ba" gracePeriod=2 Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.009359 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.058231 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5xln\" (UniqueName: \"kubernetes.io/projected/0757f0a3-9140-476e-ba9b-172960cbdd17-kube-api-access-c5xln\") pod \"0757f0a3-9140-476e-ba9b-172960cbdd17\" (UID: \"0757f0a3-9140-476e-ba9b-172960cbdd17\") " Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.058347 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0757f0a3-9140-476e-ba9b-172960cbdd17-catalog-content\") pod \"0757f0a3-9140-476e-ba9b-172960cbdd17\" (UID: \"0757f0a3-9140-476e-ba9b-172960cbdd17\") " Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.058437 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0757f0a3-9140-476e-ba9b-172960cbdd17-utilities\") pod \"0757f0a3-9140-476e-ba9b-172960cbdd17\" (UID: \"0757f0a3-9140-476e-ba9b-172960cbdd17\") " Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.059581 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0757f0a3-9140-476e-ba9b-172960cbdd17-utilities" (OuterVolumeSpecName: "utilities") pod "0757f0a3-9140-476e-ba9b-172960cbdd17" (UID: "0757f0a3-9140-476e-ba9b-172960cbdd17"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.064858 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0757f0a3-9140-476e-ba9b-172960cbdd17-kube-api-access-c5xln" (OuterVolumeSpecName: "kube-api-access-c5xln") pod "0757f0a3-9140-476e-ba9b-172960cbdd17" (UID: "0757f0a3-9140-476e-ba9b-172960cbdd17"). InnerVolumeSpecName "kube-api-access-c5xln". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.106317 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0757f0a3-9140-476e-ba9b-172960cbdd17-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0757f0a3-9140-476e-ba9b-172960cbdd17" (UID: "0757f0a3-9140-476e-ba9b-172960cbdd17"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.160477 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5xln\" (UniqueName: \"kubernetes.io/projected/0757f0a3-9140-476e-ba9b-172960cbdd17-kube-api-access-c5xln\") on node \"crc\" DevicePath \"\"" Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.160520 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0757f0a3-9140-476e-ba9b-172960cbdd17-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.160537 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0757f0a3-9140-476e-ba9b-172960cbdd17-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.589762 4684 generic.go:334] "Generic (PLEG): container finished" podID="0757f0a3-9140-476e-ba9b-172960cbdd17" containerID="44d380e04bfd5ca517f7affb60b47835fc1de47f08f79b906ade332294fb43ba" exitCode=0 Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.589846 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dts2k" event={"ID":"0757f0a3-9140-476e-ba9b-172960cbdd17","Type":"ContainerDied","Data":"44d380e04bfd5ca517f7affb60b47835fc1de47f08f79b906ade332294fb43ba"} Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.589883 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dts2k" Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.589965 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dts2k" event={"ID":"0757f0a3-9140-476e-ba9b-172960cbdd17","Type":"ContainerDied","Data":"8f264ebfaf51923b3c374ae82b6405174093bf23bbef83f753fd551f7862c4bd"} Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.589996 4684 scope.go:117] "RemoveContainer" containerID="44d380e04bfd5ca517f7affb60b47835fc1de47f08f79b906ade332294fb43ba" Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.619020 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dts2k"] Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.627376 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dts2k"] Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.630858 4684 scope.go:117] "RemoveContainer" containerID="af4574fa0075cb650702bd8ac0a4f64bb9fe0ea116293915c6fe77a5326a046c" Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.661845 4684 scope.go:117] "RemoveContainer" containerID="43af2ce0fc0f3c47377c7758b004c2384f0cb3d4cfb46ae2e151b05402c2b839" Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.730413 4684 scope.go:117] "RemoveContainer" containerID="44d380e04bfd5ca517f7affb60b47835fc1de47f08f79b906ade332294fb43ba" Oct 13 13:51:36 crc kubenswrapper[4684]: E1013 13:51:36.730962 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44d380e04bfd5ca517f7affb60b47835fc1de47f08f79b906ade332294fb43ba\": container with ID starting with 44d380e04bfd5ca517f7affb60b47835fc1de47f08f79b906ade332294fb43ba not found: ID does not exist" containerID="44d380e04bfd5ca517f7affb60b47835fc1de47f08f79b906ade332294fb43ba" Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.731005 
4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44d380e04bfd5ca517f7affb60b47835fc1de47f08f79b906ade332294fb43ba"} err="failed to get container status \"44d380e04bfd5ca517f7affb60b47835fc1de47f08f79b906ade332294fb43ba\": rpc error: code = NotFound desc = could not find container \"44d380e04bfd5ca517f7affb60b47835fc1de47f08f79b906ade332294fb43ba\": container with ID starting with 44d380e04bfd5ca517f7affb60b47835fc1de47f08f79b906ade332294fb43ba not found: ID does not exist" Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.731037 4684 scope.go:117] "RemoveContainer" containerID="af4574fa0075cb650702bd8ac0a4f64bb9fe0ea116293915c6fe77a5326a046c" Oct 13 13:51:36 crc kubenswrapper[4684]: E1013 13:51:36.731371 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af4574fa0075cb650702bd8ac0a4f64bb9fe0ea116293915c6fe77a5326a046c\": container with ID starting with af4574fa0075cb650702bd8ac0a4f64bb9fe0ea116293915c6fe77a5326a046c not found: ID does not exist" containerID="af4574fa0075cb650702bd8ac0a4f64bb9fe0ea116293915c6fe77a5326a046c" Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.731406 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af4574fa0075cb650702bd8ac0a4f64bb9fe0ea116293915c6fe77a5326a046c"} err="failed to get container status \"af4574fa0075cb650702bd8ac0a4f64bb9fe0ea116293915c6fe77a5326a046c\": rpc error: code = NotFound desc = could not find container \"af4574fa0075cb650702bd8ac0a4f64bb9fe0ea116293915c6fe77a5326a046c\": container with ID starting with af4574fa0075cb650702bd8ac0a4f64bb9fe0ea116293915c6fe77a5326a046c not found: ID does not exist" Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.731431 4684 scope.go:117] "RemoveContainer" containerID="43af2ce0fc0f3c47377c7758b004c2384f0cb3d4cfb46ae2e151b05402c2b839" Oct 13 13:51:36 crc kubenswrapper[4684]: E1013 13:51:36.731998 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43af2ce0fc0f3c47377c7758b004c2384f0cb3d4cfb46ae2e151b05402c2b839\": container with ID starting with 43af2ce0fc0f3c47377c7758b004c2384f0cb3d4cfb46ae2e151b05402c2b839 not found: ID does not exist" containerID="43af2ce0fc0f3c47377c7758b004c2384f0cb3d4cfb46ae2e151b05402c2b839" Oct 13 13:51:36 crc kubenswrapper[4684]: I1013 13:51:36.732028 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43af2ce0fc0f3c47377c7758b004c2384f0cb3d4cfb46ae2e151b05402c2b839"} err="failed to get container status \"43af2ce0fc0f3c47377c7758b004c2384f0cb3d4cfb46ae2e151b05402c2b839\": rpc error: code = NotFound desc = could not find container \"43af2ce0fc0f3c47377c7758b004c2384f0cb3d4cfb46ae2e151b05402c2b839\": container with ID starting with 43af2ce0fc0f3c47377c7758b004c2384f0cb3d4cfb46ae2e151b05402c2b839 not found: ID does not exist" Oct 13 13:51:38 crc kubenswrapper[4684]: I1013 13:51:38.368192 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0757f0a3-9140-476e-ba9b-172960cbdd17" path="/var/lib/kubelet/pods/0757f0a3-9140-476e-ba9b-172960cbdd17/volumes" Oct 13 13:52:00 crc kubenswrapper[4684]: I1013 13:52:00.560434 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:52:00 crc kubenswrapper[4684]: I1013 13:52:00.561138 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.348757 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Oct 13 13:52:13 crc kubenswrapper[4684]: E1013 13:52:13.350438 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0757f0a3-9140-476e-ba9b-172960cbdd17" containerName="extract-utilities" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.350475 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="0757f0a3-9140-476e-ba9b-172960cbdd17" containerName="extract-utilities" Oct 13 13:52:13 crc kubenswrapper[4684]: E1013 13:52:13.350508 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.350521 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 13 13:52:13 crc kubenswrapper[4684]: E1013 13:52:13.350573 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0757f0a3-9140-476e-ba9b-172960cbdd17" containerName="extract-content" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.350585 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="0757f0a3-9140-476e-ba9b-172960cbdd17" containerName="extract-content" Oct 13 13:52:13 crc kubenswrapper[4684]: E1013 13:52:13.350615 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0757f0a3-9140-476e-ba9b-172960cbdd17" containerName="registry-server" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.350627 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="0757f0a3-9140-476e-ba9b-172960cbdd17" containerName="registry-server" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.351027 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="0757f0a3-9140-476e-ba9b-172960cbdd17" containerName="registry-server" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.351071 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.352662 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.355415 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.364112 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-wp95n" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.364273 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.364989 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.366464 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.470269 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.470423 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.470486 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.470558 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.470655 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.470688 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-config-data\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.470723 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.470800 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvvpl\" (UniqueName: \"kubernetes.io/projected/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-kube-api-access-hvvpl\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.470826 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.572383 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.572497 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.572560 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.572593 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.572660 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.572685 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-config-data\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.572735 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: 
\"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.572771 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvvpl\" (UniqueName: \"kubernetes.io/projected/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-kube-api-access-hvvpl\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.572791 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.572978 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.573456 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.573586 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.574246 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-config-data\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.574651 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.580264 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.580457 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc 
kubenswrapper[4684]: I1013 13:52:13.582159 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.603116 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.603614 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvvpl\" (UniqueName: \"kubernetes.io/projected/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-kube-api-access-hvvpl\") pod \"tempest-tests-tempest\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " pod="openstack/tempest-tests-tempest" Oct 13 13:52:13 crc kubenswrapper[4684]: I1013 13:52:13.694503 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 13 13:52:14 crc kubenswrapper[4684]: I1013 13:52:14.146330 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 13 13:52:15 crc kubenswrapper[4684]: I1013 13:52:15.026942 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30","Type":"ContainerStarted","Data":"1ec1513527277b10deb949a0072b026fc2e627f1b809a1e1496e51b48016b5e0"} Oct 13 13:52:30 crc kubenswrapper[4684]: I1013 13:52:30.559500 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:52:30 crc kubenswrapper[4684]: I1013 13:52:30.560113 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:52:30 crc kubenswrapper[4684]: I1013 13:52:30.560160 4684 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:52:30 crc kubenswrapper[4684]: I1013 13:52:30.561114 4684 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0ff55f36bf6feb81cbd125fd720905f9a12f0eb524e393e1ac9b72518e5ce112"} pod="openshift-machine-config-operator/machine-config-daemon-wns5s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 13:52:30 crc kubenswrapper[4684]: I1013 13:52:30.561169 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" containerID="cri-o://0ff55f36bf6feb81cbd125fd720905f9a12f0eb524e393e1ac9b72518e5ce112" gracePeriod=600 Oct 13 13:52:31 crc kubenswrapper[4684]: I1013 13:52:31.171734 4684 
generic.go:334] "Generic (PLEG): container finished" podID="e54ad64a-6df7-4082-afde-d56463121b3f" containerID="0ff55f36bf6feb81cbd125fd720905f9a12f0eb524e393e1ac9b72518e5ce112" exitCode=0 Oct 13 13:52:31 crc kubenswrapper[4684]: I1013 13:52:31.172283 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerDied","Data":"0ff55f36bf6feb81cbd125fd720905f9a12f0eb524e393e1ac9b72518e5ce112"} Oct 13 13:52:31 crc kubenswrapper[4684]: I1013 13:52:31.172324 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"} Oct 13 13:52:31 crc kubenswrapper[4684]: I1013 13:52:31.172347 4684 scope.go:117] "RemoveContainer" containerID="0cbd4a4e9b2cecb1ed1ba416920e58a915f95b34eb8fd0b2f35a5a0f1bfd420d" Oct 13 13:52:59 crc kubenswrapper[4684]: E1013 13:52:59.828362 4684 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:1e4eeec18f8da2b364b39b7a7358aef5" Oct 13 13:52:59 crc kubenswrapper[4684]: E1013 13:52:59.829002 4684 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:1e4eeec18f8da2b364b39b7a7358aef5" Oct 13 13:52:59 crc kubenswrapper[4684]: E1013 13:52:59.829229 4684 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:1e4eeec18f8da2b364b39b7a7358aef5,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathEx
pr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hvvpl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(cba49bf6-7402-47c1-bc2d-fc49dc6b0e30): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 13 13:52:59 crc kubenswrapper[4684]: E1013 13:52:59.834096 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="cba49bf6-7402-47c1-bc2d-fc49dc6b0e30" Oct 13 13:53:00 crc kubenswrapper[4684]: E1013 13:53:00.511194 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:1e4eeec18f8da2b364b39b7a7358aef5\\\"\"" pod="openstack/tempest-tests-tempest" podUID="cba49bf6-7402-47c1-bc2d-fc49dc6b0e30" Oct 13 13:53:13 crc kubenswrapper[4684]: I1013 13:53:13.532213 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 13 13:53:14 crc kubenswrapper[4684]: I1013 13:53:14.669068 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30","Type":"ContainerStarted","Data":"2dd0ef3183ef150c4b8588ed1177ecb3b8355ec3221e1acc8b94621d4f48776c"} Oct 13 13:53:14 crc kubenswrapper[4684]: I1013 13:53:14.692868 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.323551808 podStartE2EDuration="1m2.692851662s" podCreationTimestamp="2025-10-13 13:52:12 +0000 UTC" firstStartedPulling="2025-10-13 13:52:14.160471124 +0000 UTC m=+2688.727855194" lastFinishedPulling="2025-10-13 13:53:13.529770978 +0000 UTC m=+2748.097155048" observedRunningTime="2025-10-13 13:53:14.692171101 +0000 UTC m=+2749.259555171" watchObservedRunningTime="2025-10-13 13:53:14.692851662 +0000 UTC m=+2749.260235732" Oct 13 13:53:17 crc kubenswrapper[4684]: I1013 13:53:17.550559 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xfz5b"] Oct 13 13:53:17 crc 
kubenswrapper[4684]: I1013 13:53:17.554418 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xfz5b" Oct 13 13:53:17 crc kubenswrapper[4684]: I1013 13:53:17.575994 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xfz5b"] Oct 13 13:53:17 crc kubenswrapper[4684]: I1013 13:53:17.635429 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dgwf\" (UniqueName: \"kubernetes.io/projected/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-kube-api-access-9dgwf\") pod \"community-operators-xfz5b\" (UID: \"35bc7825-ba4e-4b1f-96d8-0e267f36ed39\") " pod="openshift-marketplace/community-operators-xfz5b" Oct 13 13:53:17 crc kubenswrapper[4684]: I1013 13:53:17.635478 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-utilities\") pod \"community-operators-xfz5b\" (UID: \"35bc7825-ba4e-4b1f-96d8-0e267f36ed39\") " pod="openshift-marketplace/community-operators-xfz5b" Oct 13 13:53:17 crc kubenswrapper[4684]: I1013 13:53:17.635496 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-catalog-content\") pod \"community-operators-xfz5b\" (UID: \"35bc7825-ba4e-4b1f-96d8-0e267f36ed39\") " pod="openshift-marketplace/community-operators-xfz5b" Oct 13 13:53:17 crc kubenswrapper[4684]: I1013 13:53:17.737374 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dgwf\" (UniqueName: \"kubernetes.io/projected/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-kube-api-access-9dgwf\") pod \"community-operators-xfz5b\" (UID: \"35bc7825-ba4e-4b1f-96d8-0e267f36ed39\") " pod="openshift-marketplace/community-operators-xfz5b" Oct 13 13:53:17 crc kubenswrapper[4684]: I1013 13:53:17.737428 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-utilities\") pod \"community-operators-xfz5b\" (UID: \"35bc7825-ba4e-4b1f-96d8-0e267f36ed39\") " pod="openshift-marketplace/community-operators-xfz5b" Oct 13 13:53:17 crc kubenswrapper[4684]: I1013 13:53:17.737448 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-catalog-content\") pod \"community-operators-xfz5b\" (UID: \"35bc7825-ba4e-4b1f-96d8-0e267f36ed39\") " pod="openshift-marketplace/community-operators-xfz5b" Oct 13 13:53:17 crc kubenswrapper[4684]: I1013 13:53:17.737969 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-catalog-content\") pod \"community-operators-xfz5b\" (UID: \"35bc7825-ba4e-4b1f-96d8-0e267f36ed39\") " pod="openshift-marketplace/community-operators-xfz5b" Oct 13 13:53:17 crc kubenswrapper[4684]: I1013 13:53:17.738201 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-utilities\") pod \"community-operators-xfz5b\" (UID: \"35bc7825-ba4e-4b1f-96d8-0e267f36ed39\") " pod="openshift-marketplace/community-operators-xfz5b" Oct 
13 13:53:17 crc kubenswrapper[4684]: I1013 13:53:17.758655 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dgwf\" (UniqueName: \"kubernetes.io/projected/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-kube-api-access-9dgwf\") pod \"community-operators-xfz5b\" (UID: \"35bc7825-ba4e-4b1f-96d8-0e267f36ed39\") " pod="openshift-marketplace/community-operators-xfz5b" Oct 13 13:53:17 crc kubenswrapper[4684]: I1013 13:53:17.877684 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xfz5b" Oct 13 13:53:18 crc kubenswrapper[4684]: W1013 13:53:18.397112 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35bc7825_ba4e_4b1f_96d8_0e267f36ed39.slice/crio-79f713be8c1188789b7d1d4c4a801d0fcca326032b36c24b103b9b999797634c WatchSource:0}: Error finding container 79f713be8c1188789b7d1d4c4a801d0fcca326032b36c24b103b9b999797634c: Status 404 returned error can't find the container with id 79f713be8c1188789b7d1d4c4a801d0fcca326032b36c24b103b9b999797634c Oct 13 13:53:18 crc kubenswrapper[4684]: I1013 13:53:18.402149 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xfz5b"] Oct 13 13:53:18 crc kubenswrapper[4684]: I1013 13:53:18.704458 4684 generic.go:334] "Generic (PLEG): container finished" podID="35bc7825-ba4e-4b1f-96d8-0e267f36ed39" containerID="d4ed7e7371bd6fd6db5c71fb917d7293105348f40778e6a1f296ab3789877884" exitCode=0 Oct 13 13:53:18 crc kubenswrapper[4684]: I1013 13:53:18.704725 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfz5b" event={"ID":"35bc7825-ba4e-4b1f-96d8-0e267f36ed39","Type":"ContainerDied","Data":"d4ed7e7371bd6fd6db5c71fb917d7293105348f40778e6a1f296ab3789877884"} Oct 13 13:53:18 crc kubenswrapper[4684]: I1013 13:53:18.704794 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfz5b" event={"ID":"35bc7825-ba4e-4b1f-96d8-0e267f36ed39","Type":"ContainerStarted","Data":"79f713be8c1188789b7d1d4c4a801d0fcca326032b36c24b103b9b999797634c"} Oct 13 13:53:19 crc kubenswrapper[4684]: I1013 13:53:19.719478 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfz5b" event={"ID":"35bc7825-ba4e-4b1f-96d8-0e267f36ed39","Type":"ContainerStarted","Data":"1def59ed5806f491ba2509a173b9cbfd587dc94aa8331879f0c14871deba810a"} Oct 13 13:53:21 crc kubenswrapper[4684]: I1013 13:53:21.745815 4684 generic.go:334] "Generic (PLEG): container finished" podID="35bc7825-ba4e-4b1f-96d8-0e267f36ed39" containerID="1def59ed5806f491ba2509a173b9cbfd587dc94aa8331879f0c14871deba810a" exitCode=0 Oct 13 13:53:21 crc kubenswrapper[4684]: I1013 13:53:21.745991 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfz5b" event={"ID":"35bc7825-ba4e-4b1f-96d8-0e267f36ed39","Type":"ContainerDied","Data":"1def59ed5806f491ba2509a173b9cbfd587dc94aa8331879f0c14871deba810a"} Oct 13 13:53:22 crc kubenswrapper[4684]: I1013 13:53:22.763853 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfz5b" event={"ID":"35bc7825-ba4e-4b1f-96d8-0e267f36ed39","Type":"ContainerStarted","Data":"6172264b48dc479e8217cb7bb2f8d0c83cb61250cb9a3c7cb3122231603ca999"} Oct 13 13:53:22 crc kubenswrapper[4684]: I1013 13:53:22.796777 4684 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-marketplace/community-operators-xfz5b" podStartSLOduration=2.372502144 podStartE2EDuration="5.796755343s" podCreationTimestamp="2025-10-13 13:53:17 +0000 UTC" firstStartedPulling="2025-10-13 13:53:18.706302228 +0000 UTC m=+2753.273686298" lastFinishedPulling="2025-10-13 13:53:22.130555417 +0000 UTC m=+2756.697939497" observedRunningTime="2025-10-13 13:53:22.78227908 +0000 UTC m=+2757.349663190" watchObservedRunningTime="2025-10-13 13:53:22.796755343 +0000 UTC m=+2757.364139423" Oct 13 13:53:27 crc kubenswrapper[4684]: I1013 13:53:27.877798 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xfz5b" Oct 13 13:53:27 crc kubenswrapper[4684]: I1013 13:53:27.878325 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xfz5b" Oct 13 13:53:28 crc kubenswrapper[4684]: I1013 13:53:28.929953 4684 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-xfz5b" podUID="35bc7825-ba4e-4b1f-96d8-0e267f36ed39" containerName="registry-server" probeResult="failure" output=< Oct 13 13:53:28 crc kubenswrapper[4684]: timeout: failed to connect service ":50051" within 1s Oct 13 13:53:28 crc kubenswrapper[4684]: > Oct 13 13:53:37 crc kubenswrapper[4684]: I1013 13:53:37.932716 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xfz5b" Oct 13 13:53:37 crc kubenswrapper[4684]: I1013 13:53:37.996188 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xfz5b" Oct 13 13:53:38 crc kubenswrapper[4684]: I1013 13:53:38.170825 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xfz5b"] Oct 13 13:53:39 crc kubenswrapper[4684]: I1013 13:53:39.938178 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xfz5b" podUID="35bc7825-ba4e-4b1f-96d8-0e267f36ed39" containerName="registry-server" containerID="cri-o://6172264b48dc479e8217cb7bb2f8d0c83cb61250cb9a3c7cb3122231603ca999" gracePeriod=2 Oct 13 13:53:40 crc kubenswrapper[4684]: I1013 13:53:40.470566 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xfz5b" Oct 13 13:53:40 crc kubenswrapper[4684]: I1013 13:53:40.535884 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-utilities\") pod \"35bc7825-ba4e-4b1f-96d8-0e267f36ed39\" (UID: \"35bc7825-ba4e-4b1f-96d8-0e267f36ed39\") " Oct 13 13:53:40 crc kubenswrapper[4684]: I1013 13:53:40.536006 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dgwf\" (UniqueName: \"kubernetes.io/projected/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-kube-api-access-9dgwf\") pod \"35bc7825-ba4e-4b1f-96d8-0e267f36ed39\" (UID: \"35bc7825-ba4e-4b1f-96d8-0e267f36ed39\") " Oct 13 13:53:40 crc kubenswrapper[4684]: I1013 13:53:40.536256 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-catalog-content\") pod \"35bc7825-ba4e-4b1f-96d8-0e267f36ed39\" (UID: \"35bc7825-ba4e-4b1f-96d8-0e267f36ed39\") " Oct 13 13:53:40 crc kubenswrapper[4684]: I1013 13:53:40.536696 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-utilities" (OuterVolumeSpecName: "utilities") pod "35bc7825-ba4e-4b1f-96d8-0e267f36ed39" (UID: "35bc7825-ba4e-4b1f-96d8-0e267f36ed39"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:53:40 crc kubenswrapper[4684]: I1013 13:53:40.541703 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-kube-api-access-9dgwf" (OuterVolumeSpecName: "kube-api-access-9dgwf") pod "35bc7825-ba4e-4b1f-96d8-0e267f36ed39" (UID: "35bc7825-ba4e-4b1f-96d8-0e267f36ed39"). InnerVolumeSpecName "kube-api-access-9dgwf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:53:40 crc kubenswrapper[4684]: I1013 13:53:40.549043 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:53:40 crc kubenswrapper[4684]: I1013 13:53:40.549093 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dgwf\" (UniqueName: \"kubernetes.io/projected/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-kube-api-access-9dgwf\") on node \"crc\" DevicePath \"\"" Oct 13 13:53:40 crc kubenswrapper[4684]: I1013 13:53:40.578289 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "35bc7825-ba4e-4b1f-96d8-0e267f36ed39" (UID: "35bc7825-ba4e-4b1f-96d8-0e267f36ed39"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:53:40 crc kubenswrapper[4684]: I1013 13:53:40.650804 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35bc7825-ba4e-4b1f-96d8-0e267f36ed39-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:53:40 crc kubenswrapper[4684]: I1013 13:53:40.953787 4684 generic.go:334] "Generic (PLEG): container finished" podID="35bc7825-ba4e-4b1f-96d8-0e267f36ed39" containerID="6172264b48dc479e8217cb7bb2f8d0c83cb61250cb9a3c7cb3122231603ca999" exitCode=0 Oct 13 13:53:40 crc kubenswrapper[4684]: I1013 13:53:40.953838 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfz5b" event={"ID":"35bc7825-ba4e-4b1f-96d8-0e267f36ed39","Type":"ContainerDied","Data":"6172264b48dc479e8217cb7bb2f8d0c83cb61250cb9a3c7cb3122231603ca999"} Oct 13 13:53:40 crc kubenswrapper[4684]: I1013 13:53:40.953949 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xfz5b" event={"ID":"35bc7825-ba4e-4b1f-96d8-0e267f36ed39","Type":"ContainerDied","Data":"79f713be8c1188789b7d1d4c4a801d0fcca326032b36c24b103b9b999797634c"} Oct 13 13:53:40 crc kubenswrapper[4684]: I1013 13:53:40.953989 4684 scope.go:117] "RemoveContainer" containerID="6172264b48dc479e8217cb7bb2f8d0c83cb61250cb9a3c7cb3122231603ca999" Oct 13 13:53:40 crc kubenswrapper[4684]: I1013 13:53:40.954008 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xfz5b" Oct 13 13:53:40 crc kubenswrapper[4684]: I1013 13:53:40.993755 4684 scope.go:117] "RemoveContainer" containerID="1def59ed5806f491ba2509a173b9cbfd587dc94aa8331879f0c14871deba810a" Oct 13 13:53:41 crc kubenswrapper[4684]: I1013 13:53:41.020381 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xfz5b"] Oct 13 13:53:41 crc kubenswrapper[4684]: I1013 13:53:41.033254 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xfz5b"] Oct 13 13:53:41 crc kubenswrapper[4684]: I1013 13:53:41.039407 4684 scope.go:117] "RemoveContainer" containerID="d4ed7e7371bd6fd6db5c71fb917d7293105348f40778e6a1f296ab3789877884" Oct 13 13:53:41 crc kubenswrapper[4684]: I1013 13:53:41.095884 4684 scope.go:117] "RemoveContainer" containerID="6172264b48dc479e8217cb7bb2f8d0c83cb61250cb9a3c7cb3122231603ca999" Oct 13 13:53:41 crc kubenswrapper[4684]: E1013 13:53:41.096871 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6172264b48dc479e8217cb7bb2f8d0c83cb61250cb9a3c7cb3122231603ca999\": container with ID starting with 6172264b48dc479e8217cb7bb2f8d0c83cb61250cb9a3c7cb3122231603ca999 not found: ID does not exist" containerID="6172264b48dc479e8217cb7bb2f8d0c83cb61250cb9a3c7cb3122231603ca999" Oct 13 13:53:41 crc kubenswrapper[4684]: I1013 13:53:41.096942 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6172264b48dc479e8217cb7bb2f8d0c83cb61250cb9a3c7cb3122231603ca999"} err="failed to get container status \"6172264b48dc479e8217cb7bb2f8d0c83cb61250cb9a3c7cb3122231603ca999\": rpc error: code = NotFound desc = could not find container \"6172264b48dc479e8217cb7bb2f8d0c83cb61250cb9a3c7cb3122231603ca999\": container with ID starting with 6172264b48dc479e8217cb7bb2f8d0c83cb61250cb9a3c7cb3122231603ca999 not found: ID does not exist" Oct 13 
13:53:41 crc kubenswrapper[4684]: I1013 13:53:41.096975 4684 scope.go:117] "RemoveContainer" containerID="1def59ed5806f491ba2509a173b9cbfd587dc94aa8331879f0c14871deba810a" Oct 13 13:53:41 crc kubenswrapper[4684]: E1013 13:53:41.097553 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1def59ed5806f491ba2509a173b9cbfd587dc94aa8331879f0c14871deba810a\": container with ID starting with 1def59ed5806f491ba2509a173b9cbfd587dc94aa8331879f0c14871deba810a not found: ID does not exist" containerID="1def59ed5806f491ba2509a173b9cbfd587dc94aa8331879f0c14871deba810a" Oct 13 13:53:41 crc kubenswrapper[4684]: I1013 13:53:41.097588 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1def59ed5806f491ba2509a173b9cbfd587dc94aa8331879f0c14871deba810a"} err="failed to get container status \"1def59ed5806f491ba2509a173b9cbfd587dc94aa8331879f0c14871deba810a\": rpc error: code = NotFound desc = could not find container \"1def59ed5806f491ba2509a173b9cbfd587dc94aa8331879f0c14871deba810a\": container with ID starting with 1def59ed5806f491ba2509a173b9cbfd587dc94aa8331879f0c14871deba810a not found: ID does not exist" Oct 13 13:53:41 crc kubenswrapper[4684]: I1013 13:53:41.097609 4684 scope.go:117] "RemoveContainer" containerID="d4ed7e7371bd6fd6db5c71fb917d7293105348f40778e6a1f296ab3789877884" Oct 13 13:53:41 crc kubenswrapper[4684]: E1013 13:53:41.097968 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4ed7e7371bd6fd6db5c71fb917d7293105348f40778e6a1f296ab3789877884\": container with ID starting with d4ed7e7371bd6fd6db5c71fb917d7293105348f40778e6a1f296ab3789877884 not found: ID does not exist" containerID="d4ed7e7371bd6fd6db5c71fb917d7293105348f40778e6a1f296ab3789877884" Oct 13 13:53:41 crc kubenswrapper[4684]: I1013 13:53:41.098006 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4ed7e7371bd6fd6db5c71fb917d7293105348f40778e6a1f296ab3789877884"} err="failed to get container status \"d4ed7e7371bd6fd6db5c71fb917d7293105348f40778e6a1f296ab3789877884\": rpc error: code = NotFound desc = could not find container \"d4ed7e7371bd6fd6db5c71fb917d7293105348f40778e6a1f296ab3789877884\": container with ID starting with d4ed7e7371bd6fd6db5c71fb917d7293105348f40778e6a1f296ab3789877884 not found: ID does not exist" Oct 13 13:53:42 crc kubenswrapper[4684]: I1013 13:53:42.365810 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35bc7825-ba4e-4b1f-96d8-0e267f36ed39" path="/var/lib/kubelet/pods/35bc7825-ba4e-4b1f-96d8-0e267f36ed39/volumes" Oct 13 13:54:19 crc kubenswrapper[4684]: I1013 13:54:19.683740 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dvjs8"] Oct 13 13:54:19 crc kubenswrapper[4684]: E1013 13:54:19.684927 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35bc7825-ba4e-4b1f-96d8-0e267f36ed39" containerName="registry-server" Oct 13 13:54:19 crc kubenswrapper[4684]: I1013 13:54:19.684947 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="35bc7825-ba4e-4b1f-96d8-0e267f36ed39" containerName="registry-server" Oct 13 13:54:19 crc kubenswrapper[4684]: E1013 13:54:19.684974 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35bc7825-ba4e-4b1f-96d8-0e267f36ed39" containerName="extract-utilities" Oct 13 13:54:19 crc kubenswrapper[4684]: I1013 13:54:19.684986 
4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="35bc7825-ba4e-4b1f-96d8-0e267f36ed39" containerName="extract-utilities" Oct 13 13:54:19 crc kubenswrapper[4684]: E1013 13:54:19.685002 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35bc7825-ba4e-4b1f-96d8-0e267f36ed39" containerName="extract-content" Oct 13 13:54:19 crc kubenswrapper[4684]: I1013 13:54:19.685011 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="35bc7825-ba4e-4b1f-96d8-0e267f36ed39" containerName="extract-content" Oct 13 13:54:19 crc kubenswrapper[4684]: I1013 13:54:19.685256 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="35bc7825-ba4e-4b1f-96d8-0e267f36ed39" containerName="registry-server" Oct 13 13:54:19 crc kubenswrapper[4684]: I1013 13:54:19.687025 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:19 crc kubenswrapper[4684]: I1013 13:54:19.704830 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dvjs8"] Oct 13 13:54:19 crc kubenswrapper[4684]: I1013 13:54:19.809653 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-catalog-content\") pod \"redhat-marketplace-dvjs8\" (UID: \"f0f77266-1a46-4d1e-812c-dbdbe6be0a41\") " pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:19 crc kubenswrapper[4684]: I1013 13:54:19.809739 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gzmk\" (UniqueName: \"kubernetes.io/projected/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-kube-api-access-6gzmk\") pod \"redhat-marketplace-dvjs8\" (UID: \"f0f77266-1a46-4d1e-812c-dbdbe6be0a41\") " pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:19 crc kubenswrapper[4684]: I1013 13:54:19.809763 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-utilities\") pod \"redhat-marketplace-dvjs8\" (UID: \"f0f77266-1a46-4d1e-812c-dbdbe6be0a41\") " pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:19 crc kubenswrapper[4684]: I1013 13:54:19.911680 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gzmk\" (UniqueName: \"kubernetes.io/projected/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-kube-api-access-6gzmk\") pod \"redhat-marketplace-dvjs8\" (UID: \"f0f77266-1a46-4d1e-812c-dbdbe6be0a41\") " pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:19 crc kubenswrapper[4684]: I1013 13:54:19.911742 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-utilities\") pod \"redhat-marketplace-dvjs8\" (UID: \"f0f77266-1a46-4d1e-812c-dbdbe6be0a41\") " pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:19 crc kubenswrapper[4684]: I1013 13:54:19.911891 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-catalog-content\") pod \"redhat-marketplace-dvjs8\" (UID: \"f0f77266-1a46-4d1e-812c-dbdbe6be0a41\") " pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:19 crc 
kubenswrapper[4684]: I1013 13:54:19.912375 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-catalog-content\") pod \"redhat-marketplace-dvjs8\" (UID: \"f0f77266-1a46-4d1e-812c-dbdbe6be0a41\") " pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:19 crc kubenswrapper[4684]: I1013 13:54:19.912463 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-utilities\") pod \"redhat-marketplace-dvjs8\" (UID: \"f0f77266-1a46-4d1e-812c-dbdbe6be0a41\") " pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:19 crc kubenswrapper[4684]: I1013 13:54:19.947069 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gzmk\" (UniqueName: \"kubernetes.io/projected/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-kube-api-access-6gzmk\") pod \"redhat-marketplace-dvjs8\" (UID: \"f0f77266-1a46-4d1e-812c-dbdbe6be0a41\") " pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:20 crc kubenswrapper[4684]: I1013 13:54:20.014267 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:20 crc kubenswrapper[4684]: I1013 13:54:20.464680 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dvjs8"] Oct 13 13:54:21 crc kubenswrapper[4684]: I1013 13:54:21.391345 4684 generic.go:334] "Generic (PLEG): container finished" podID="f0f77266-1a46-4d1e-812c-dbdbe6be0a41" containerID="bea86e5aa917c4163bae4a696f54d355d80598c0fcd9da2e496a5aa3b16bc0fe" exitCode=0 Oct 13 13:54:21 crc kubenswrapper[4684]: I1013 13:54:21.391480 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dvjs8" event={"ID":"f0f77266-1a46-4d1e-812c-dbdbe6be0a41","Type":"ContainerDied","Data":"bea86e5aa917c4163bae4a696f54d355d80598c0fcd9da2e496a5aa3b16bc0fe"} Oct 13 13:54:21 crc kubenswrapper[4684]: I1013 13:54:21.391981 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dvjs8" event={"ID":"f0f77266-1a46-4d1e-812c-dbdbe6be0a41","Type":"ContainerStarted","Data":"398dc8ae6da05c42116aa71b3e8ba5b814c89b33dc9dc4168c0d4601c7b72533"} Oct 13 13:54:23 crc kubenswrapper[4684]: I1013 13:54:23.422670 4684 generic.go:334] "Generic (PLEG): container finished" podID="f0f77266-1a46-4d1e-812c-dbdbe6be0a41" containerID="90cc9e599fd1942c5334036d59de4b16a249ef972b34c629be14da80a8791e45" exitCode=0 Oct 13 13:54:23 crc kubenswrapper[4684]: I1013 13:54:23.422780 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dvjs8" event={"ID":"f0f77266-1a46-4d1e-812c-dbdbe6be0a41","Type":"ContainerDied","Data":"90cc9e599fd1942c5334036d59de4b16a249ef972b34c629be14da80a8791e45"} Oct 13 13:54:24 crc kubenswrapper[4684]: I1013 13:54:24.436286 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dvjs8" event={"ID":"f0f77266-1a46-4d1e-812c-dbdbe6be0a41","Type":"ContainerStarted","Data":"d64f3b7272c7b502e21e28ac9c4150bf46893ff687f2694683d803dd25f020e4"} Oct 13 13:54:24 crc kubenswrapper[4684]: I1013 13:54:24.462621 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dvjs8" podStartSLOduration=2.894496842 
podStartE2EDuration="5.462598811s" podCreationTimestamp="2025-10-13 13:54:19 +0000 UTC" firstStartedPulling="2025-10-13 13:54:21.394765215 +0000 UTC m=+2815.962149285" lastFinishedPulling="2025-10-13 13:54:23.962867154 +0000 UTC m=+2818.530251254" observedRunningTime="2025-10-13 13:54:24.45871602 +0000 UTC m=+2819.026100090" watchObservedRunningTime="2025-10-13 13:54:24.462598811 +0000 UTC m=+2819.029982881" Oct 13 13:54:30 crc kubenswrapper[4684]: I1013 13:54:30.015479 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:30 crc kubenswrapper[4684]: I1013 13:54:30.016219 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:30 crc kubenswrapper[4684]: I1013 13:54:30.089697 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:30 crc kubenswrapper[4684]: I1013 13:54:30.560376 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:54:30 crc kubenswrapper[4684]: I1013 13:54:30.560448 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:54:30 crc kubenswrapper[4684]: I1013 13:54:30.568244 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:30 crc kubenswrapper[4684]: I1013 13:54:30.623199 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dvjs8"] Oct 13 13:54:32 crc kubenswrapper[4684]: I1013 13:54:32.519580 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dvjs8" podUID="f0f77266-1a46-4d1e-812c-dbdbe6be0a41" containerName="registry-server" containerID="cri-o://d64f3b7272c7b502e21e28ac9c4150bf46893ff687f2694683d803dd25f020e4" gracePeriod=2 Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.047729 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.219873 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-utilities\") pod \"f0f77266-1a46-4d1e-812c-dbdbe6be0a41\" (UID: \"f0f77266-1a46-4d1e-812c-dbdbe6be0a41\") " Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.219980 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-catalog-content\") pod \"f0f77266-1a46-4d1e-812c-dbdbe6be0a41\" (UID: \"f0f77266-1a46-4d1e-812c-dbdbe6be0a41\") " Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.220086 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gzmk\" (UniqueName: \"kubernetes.io/projected/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-kube-api-access-6gzmk\") pod \"f0f77266-1a46-4d1e-812c-dbdbe6be0a41\" (UID: \"f0f77266-1a46-4d1e-812c-dbdbe6be0a41\") " Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.221037 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-utilities" (OuterVolumeSpecName: "utilities") pod "f0f77266-1a46-4d1e-812c-dbdbe6be0a41" (UID: "f0f77266-1a46-4d1e-812c-dbdbe6be0a41"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.231199 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-kube-api-access-6gzmk" (OuterVolumeSpecName: "kube-api-access-6gzmk") pod "f0f77266-1a46-4d1e-812c-dbdbe6be0a41" (UID: "f0f77266-1a46-4d1e-812c-dbdbe6be0a41"). InnerVolumeSpecName "kube-api-access-6gzmk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.237978 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f0f77266-1a46-4d1e-812c-dbdbe6be0a41" (UID: "f0f77266-1a46-4d1e-812c-dbdbe6be0a41"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.323311 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.323378 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.323402 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gzmk\" (UniqueName: \"kubernetes.io/projected/f0f77266-1a46-4d1e-812c-dbdbe6be0a41-kube-api-access-6gzmk\") on node \"crc\" DevicePath \"\"" Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.539210 4684 generic.go:334] "Generic (PLEG): container finished" podID="f0f77266-1a46-4d1e-812c-dbdbe6be0a41" containerID="d64f3b7272c7b502e21e28ac9c4150bf46893ff687f2694683d803dd25f020e4" exitCode=0 Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.539313 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dvjs8" Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.539322 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dvjs8" event={"ID":"f0f77266-1a46-4d1e-812c-dbdbe6be0a41","Type":"ContainerDied","Data":"d64f3b7272c7b502e21e28ac9c4150bf46893ff687f2694683d803dd25f020e4"} Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.539449 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dvjs8" event={"ID":"f0f77266-1a46-4d1e-812c-dbdbe6be0a41","Type":"ContainerDied","Data":"398dc8ae6da05c42116aa71b3e8ba5b814c89b33dc9dc4168c0d4601c7b72533"} Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.539469 4684 scope.go:117] "RemoveContainer" containerID="d64f3b7272c7b502e21e28ac9c4150bf46893ff687f2694683d803dd25f020e4" Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.569994 4684 scope.go:117] "RemoveContainer" containerID="90cc9e599fd1942c5334036d59de4b16a249ef972b34c629be14da80a8791e45" Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.600823 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dvjs8"] Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.612018 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dvjs8"] Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.617736 4684 scope.go:117] "RemoveContainer" containerID="bea86e5aa917c4163bae4a696f54d355d80598c0fcd9da2e496a5aa3b16bc0fe" Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.661290 4684 scope.go:117] "RemoveContainer" containerID="d64f3b7272c7b502e21e28ac9c4150bf46893ff687f2694683d803dd25f020e4" Oct 13 13:54:33 crc kubenswrapper[4684]: E1013 13:54:33.661888 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d64f3b7272c7b502e21e28ac9c4150bf46893ff687f2694683d803dd25f020e4\": container with ID starting with d64f3b7272c7b502e21e28ac9c4150bf46893ff687f2694683d803dd25f020e4 not found: ID does not exist" containerID="d64f3b7272c7b502e21e28ac9c4150bf46893ff687f2694683d803dd25f020e4" Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.661987 4684 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d64f3b7272c7b502e21e28ac9c4150bf46893ff687f2694683d803dd25f020e4"} err="failed to get container status \"d64f3b7272c7b502e21e28ac9c4150bf46893ff687f2694683d803dd25f020e4\": rpc error: code = NotFound desc = could not find container \"d64f3b7272c7b502e21e28ac9c4150bf46893ff687f2694683d803dd25f020e4\": container with ID starting with d64f3b7272c7b502e21e28ac9c4150bf46893ff687f2694683d803dd25f020e4 not found: ID does not exist" Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.662029 4684 scope.go:117] "RemoveContainer" containerID="90cc9e599fd1942c5334036d59de4b16a249ef972b34c629be14da80a8791e45" Oct 13 13:54:33 crc kubenswrapper[4684]: E1013 13:54:33.663316 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90cc9e599fd1942c5334036d59de4b16a249ef972b34c629be14da80a8791e45\": container with ID starting with 90cc9e599fd1942c5334036d59de4b16a249ef972b34c629be14da80a8791e45 not found: ID does not exist" containerID="90cc9e599fd1942c5334036d59de4b16a249ef972b34c629be14da80a8791e45" Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.663354 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90cc9e599fd1942c5334036d59de4b16a249ef972b34c629be14da80a8791e45"} err="failed to get container status \"90cc9e599fd1942c5334036d59de4b16a249ef972b34c629be14da80a8791e45\": rpc error: code = NotFound desc = could not find container \"90cc9e599fd1942c5334036d59de4b16a249ef972b34c629be14da80a8791e45\": container with ID starting with 90cc9e599fd1942c5334036d59de4b16a249ef972b34c629be14da80a8791e45 not found: ID does not exist" Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.663383 4684 scope.go:117] "RemoveContainer" containerID="bea86e5aa917c4163bae4a696f54d355d80598c0fcd9da2e496a5aa3b16bc0fe" Oct 13 13:54:33 crc kubenswrapper[4684]: E1013 13:54:33.663856 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bea86e5aa917c4163bae4a696f54d355d80598c0fcd9da2e496a5aa3b16bc0fe\": container with ID starting with bea86e5aa917c4163bae4a696f54d355d80598c0fcd9da2e496a5aa3b16bc0fe not found: ID does not exist" containerID="bea86e5aa917c4163bae4a696f54d355d80598c0fcd9da2e496a5aa3b16bc0fe" Oct 13 13:54:33 crc kubenswrapper[4684]: I1013 13:54:33.663994 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bea86e5aa917c4163bae4a696f54d355d80598c0fcd9da2e496a5aa3b16bc0fe"} err="failed to get container status \"bea86e5aa917c4163bae4a696f54d355d80598c0fcd9da2e496a5aa3b16bc0fe\": rpc error: code = NotFound desc = could not find container \"bea86e5aa917c4163bae4a696f54d355d80598c0fcd9da2e496a5aa3b16bc0fe\": container with ID starting with bea86e5aa917c4163bae4a696f54d355d80598c0fcd9da2e496a5aa3b16bc0fe not found: ID does not exist" Oct 13 13:54:34 crc kubenswrapper[4684]: I1013 13:54:34.366280 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0f77266-1a46-4d1e-812c-dbdbe6be0a41" path="/var/lib/kubelet/pods/f0f77266-1a46-4d1e-812c-dbdbe6be0a41/volumes" Oct 13 13:55:00 crc kubenswrapper[4684]: I1013 13:55:00.559757 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:55:00 crc kubenswrapper[4684]: I1013 13:55:00.560404 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:55:30 crc kubenswrapper[4684]: I1013 13:55:30.560773 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 13:55:30 crc kubenswrapper[4684]: I1013 13:55:30.561422 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 13:55:30 crc kubenswrapper[4684]: I1013 13:55:30.561501 4684 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 13:55:30 crc kubenswrapper[4684]: I1013 13:55:30.562623 4684 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"} pod="openshift-machine-config-operator/machine-config-daemon-wns5s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 13:55:30 crc kubenswrapper[4684]: I1013 13:55:30.562729 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" containerID="cri-o://a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de" gracePeriod=600 Oct 13 13:55:30 crc kubenswrapper[4684]: E1013 13:55:30.710372 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:55:31 crc kubenswrapper[4684]: I1013 13:55:31.148001 4684 generic.go:334] "Generic (PLEG): container finished" podID="e54ad64a-6df7-4082-afde-d56463121b3f" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de" exitCode=0 Oct 13 13:55:31 crc kubenswrapper[4684]: I1013 13:55:31.148299 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerDied","Data":"a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"} Oct 13 13:55:31 crc kubenswrapper[4684]: I1013 13:55:31.148346 4684 scope.go:117] "RemoveContainer" containerID="0ff55f36bf6feb81cbd125fd720905f9a12f0eb524e393e1ac9b72518e5ce112" Oct 13 13:55:31 crc kubenswrapper[4684]: I1013 
13:55:31.149098 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de" Oct 13 13:55:31 crc kubenswrapper[4684]: E1013 13:55:31.149389 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:55:45 crc kubenswrapper[4684]: I1013 13:55:45.350662 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de" Oct 13 13:55:45 crc kubenswrapper[4684]: E1013 13:55:45.351376 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.019110 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-s2bxz"] Oct 13 13:55:55 crc kubenswrapper[4684]: E1013 13:55:55.020297 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0f77266-1a46-4d1e-812c-dbdbe6be0a41" containerName="extract-utilities" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.020321 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0f77266-1a46-4d1e-812c-dbdbe6be0a41" containerName="extract-utilities" Oct 13 13:55:55 crc kubenswrapper[4684]: E1013 13:55:55.020360 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0f77266-1a46-4d1e-812c-dbdbe6be0a41" containerName="registry-server" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.020372 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0f77266-1a46-4d1e-812c-dbdbe6be0a41" containerName="registry-server" Oct 13 13:55:55 crc kubenswrapper[4684]: E1013 13:55:55.020415 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0f77266-1a46-4d1e-812c-dbdbe6be0a41" containerName="extract-content" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.020427 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0f77266-1a46-4d1e-812c-dbdbe6be0a41" containerName="extract-content" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.020678 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0f77266-1a46-4d1e-812c-dbdbe6be0a41" containerName="registry-server" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.022741 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.035622 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s2bxz"] Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.095298 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4knpx\" (UniqueName: \"kubernetes.io/projected/ea40821d-0ccb-40fb-a29b-f8414d2667d7-kube-api-access-4knpx\") pod \"redhat-operators-s2bxz\" (UID: \"ea40821d-0ccb-40fb-a29b-f8414d2667d7\") " pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.095538 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea40821d-0ccb-40fb-a29b-f8414d2667d7-catalog-content\") pod \"redhat-operators-s2bxz\" (UID: \"ea40821d-0ccb-40fb-a29b-f8414d2667d7\") " pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.095621 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea40821d-0ccb-40fb-a29b-f8414d2667d7-utilities\") pod \"redhat-operators-s2bxz\" (UID: \"ea40821d-0ccb-40fb-a29b-f8414d2667d7\") " pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.197258 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea40821d-0ccb-40fb-a29b-f8414d2667d7-catalog-content\") pod \"redhat-operators-s2bxz\" (UID: \"ea40821d-0ccb-40fb-a29b-f8414d2667d7\") " pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.197343 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea40821d-0ccb-40fb-a29b-f8414d2667d7-utilities\") pod \"redhat-operators-s2bxz\" (UID: \"ea40821d-0ccb-40fb-a29b-f8414d2667d7\") " pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.197498 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4knpx\" (UniqueName: \"kubernetes.io/projected/ea40821d-0ccb-40fb-a29b-f8414d2667d7-kube-api-access-4knpx\") pod \"redhat-operators-s2bxz\" (UID: \"ea40821d-0ccb-40fb-a29b-f8414d2667d7\") " pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.198018 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea40821d-0ccb-40fb-a29b-f8414d2667d7-utilities\") pod \"redhat-operators-s2bxz\" (UID: \"ea40821d-0ccb-40fb-a29b-f8414d2667d7\") " pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.198247 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea40821d-0ccb-40fb-a29b-f8414d2667d7-catalog-content\") pod \"redhat-operators-s2bxz\" (UID: \"ea40821d-0ccb-40fb-a29b-f8414d2667d7\") " pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.217191 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-4knpx\" (UniqueName: \"kubernetes.io/projected/ea40821d-0ccb-40fb-a29b-f8414d2667d7-kube-api-access-4knpx\") pod \"redhat-operators-s2bxz\" (UID: \"ea40821d-0ccb-40fb-a29b-f8414d2667d7\") " pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.357214 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:55:55 crc kubenswrapper[4684]: I1013 13:55:55.835752 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s2bxz"] Oct 13 13:55:56 crc kubenswrapper[4684]: I1013 13:55:56.406281 4684 generic.go:334] "Generic (PLEG): container finished" podID="ea40821d-0ccb-40fb-a29b-f8414d2667d7" containerID="a2f385e6867b89bfd93ba7fc70cb576ea74752f9a0aeb65ea9989660a5059104" exitCode=0 Oct 13 13:55:56 crc kubenswrapper[4684]: I1013 13:55:56.406376 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2bxz" event={"ID":"ea40821d-0ccb-40fb-a29b-f8414d2667d7","Type":"ContainerDied","Data":"a2f385e6867b89bfd93ba7fc70cb576ea74752f9a0aeb65ea9989660a5059104"} Oct 13 13:55:56 crc kubenswrapper[4684]: I1013 13:55:56.406598 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2bxz" event={"ID":"ea40821d-0ccb-40fb-a29b-f8414d2667d7","Type":"ContainerStarted","Data":"7d1a2f7e99ab36f6d8125de0443dc06477763ff76c82f537cf815c11ea81dfee"} Oct 13 13:55:57 crc kubenswrapper[4684]: I1013 13:55:57.418355 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2bxz" event={"ID":"ea40821d-0ccb-40fb-a29b-f8414d2667d7","Type":"ContainerStarted","Data":"8ab8d99afd288f37579530730e7b884af0bc6a1a5413f4d0bcc5665af358f777"} Oct 13 13:55:58 crc kubenswrapper[4684]: I1013 13:55:58.350851 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de" Oct 13 13:55:58 crc kubenswrapper[4684]: E1013 13:55:58.351512 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 13:55:58 crc kubenswrapper[4684]: I1013 13:55:58.437021 4684 generic.go:334] "Generic (PLEG): container finished" podID="ea40821d-0ccb-40fb-a29b-f8414d2667d7" containerID="8ab8d99afd288f37579530730e7b884af0bc6a1a5413f4d0bcc5665af358f777" exitCode=0 Oct 13 13:55:58 crc kubenswrapper[4684]: I1013 13:55:58.437100 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2bxz" event={"ID":"ea40821d-0ccb-40fb-a29b-f8414d2667d7","Type":"ContainerDied","Data":"8ab8d99afd288f37579530730e7b884af0bc6a1a5413f4d0bcc5665af358f777"} Oct 13 13:56:00 crc kubenswrapper[4684]: I1013 13:56:00.457856 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2bxz" event={"ID":"ea40821d-0ccb-40fb-a29b-f8414d2667d7","Type":"ContainerStarted","Data":"ed6ea14c1bc28ff434ec6da6d45db8e41c2352dfb78e76a97b0af52a2e9e271b"} Oct 13 13:56:00 crc kubenswrapper[4684]: I1013 13:56:00.480453 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-s2bxz" podStartSLOduration=3.905956825 podStartE2EDuration="6.480426123s" podCreationTimestamp="2025-10-13 13:55:54 +0000 UTC" firstStartedPulling="2025-10-13 13:55:56.40921267 +0000 UTC m=+2910.976596740" lastFinishedPulling="2025-10-13 13:55:58.983681958 +0000 UTC m=+2913.551066038" observedRunningTime="2025-10-13 13:56:00.476711056 +0000 UTC m=+2915.044095176" watchObservedRunningTime="2025-10-13 13:56:00.480426123 +0000 UTC m=+2915.047810213" Oct 13 13:56:05 crc kubenswrapper[4684]: I1013 13:56:05.357853 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:56:05 crc kubenswrapper[4684]: I1013 13:56:05.358607 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:56:05 crc kubenswrapper[4684]: I1013 13:56:05.418621 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:56:05 crc kubenswrapper[4684]: I1013 13:56:05.584307 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:56:05 crc kubenswrapper[4684]: I1013 13:56:05.657292 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s2bxz"] Oct 13 13:56:07 crc kubenswrapper[4684]: I1013 13:56:07.534505 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-s2bxz" podUID="ea40821d-0ccb-40fb-a29b-f8414d2667d7" containerName="registry-server" containerID="cri-o://ed6ea14c1bc28ff434ec6da6d45db8e41c2352dfb78e76a97b0af52a2e9e271b" gracePeriod=2 Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.084836 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.190378 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea40821d-0ccb-40fb-a29b-f8414d2667d7-catalog-content\") pod \"ea40821d-0ccb-40fb-a29b-f8414d2667d7\" (UID: \"ea40821d-0ccb-40fb-a29b-f8414d2667d7\") " Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.190495 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea40821d-0ccb-40fb-a29b-f8414d2667d7-utilities\") pod \"ea40821d-0ccb-40fb-a29b-f8414d2667d7\" (UID: \"ea40821d-0ccb-40fb-a29b-f8414d2667d7\") " Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.190617 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4knpx\" (UniqueName: \"kubernetes.io/projected/ea40821d-0ccb-40fb-a29b-f8414d2667d7-kube-api-access-4knpx\") pod \"ea40821d-0ccb-40fb-a29b-f8414d2667d7\" (UID: \"ea40821d-0ccb-40fb-a29b-f8414d2667d7\") " Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.191800 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea40821d-0ccb-40fb-a29b-f8414d2667d7-utilities" (OuterVolumeSpecName: "utilities") pod "ea40821d-0ccb-40fb-a29b-f8414d2667d7" (UID: "ea40821d-0ccb-40fb-a29b-f8414d2667d7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.199343 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea40821d-0ccb-40fb-a29b-f8414d2667d7-kube-api-access-4knpx" (OuterVolumeSpecName: "kube-api-access-4knpx") pod "ea40821d-0ccb-40fb-a29b-f8414d2667d7" (UID: "ea40821d-0ccb-40fb-a29b-f8414d2667d7"). InnerVolumeSpecName "kube-api-access-4knpx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.292979 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4knpx\" (UniqueName: \"kubernetes.io/projected/ea40821d-0ccb-40fb-a29b-f8414d2667d7-kube-api-access-4knpx\") on node \"crc\" DevicePath \"\"" Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.293006 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea40821d-0ccb-40fb-a29b-f8414d2667d7-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.306403 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea40821d-0ccb-40fb-a29b-f8414d2667d7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ea40821d-0ccb-40fb-a29b-f8414d2667d7" (UID: "ea40821d-0ccb-40fb-a29b-f8414d2667d7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.395144 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea40821d-0ccb-40fb-a29b-f8414d2667d7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.544719 4684 generic.go:334] "Generic (PLEG): container finished" podID="ea40821d-0ccb-40fb-a29b-f8414d2667d7" containerID="ed6ea14c1bc28ff434ec6da6d45db8e41c2352dfb78e76a97b0af52a2e9e271b" exitCode=0 Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.544790 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s2bxz" Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.544818 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2bxz" event={"ID":"ea40821d-0ccb-40fb-a29b-f8414d2667d7","Type":"ContainerDied","Data":"ed6ea14c1bc28ff434ec6da6d45db8e41c2352dfb78e76a97b0af52a2e9e271b"} Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.546216 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s2bxz" event={"ID":"ea40821d-0ccb-40fb-a29b-f8414d2667d7","Type":"ContainerDied","Data":"7d1a2f7e99ab36f6d8125de0443dc06477763ff76c82f537cf815c11ea81dfee"} Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.546242 4684 scope.go:117] "RemoveContainer" containerID="ed6ea14c1bc28ff434ec6da6d45db8e41c2352dfb78e76a97b0af52a2e9e271b" Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.567215 4684 scope.go:117] "RemoveContainer" containerID="8ab8d99afd288f37579530730e7b884af0bc6a1a5413f4d0bcc5665af358f777" Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.572761 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s2bxz"] Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.581573 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-s2bxz"] Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.592761 4684 scope.go:117] "RemoveContainer" containerID="a2f385e6867b89bfd93ba7fc70cb576ea74752f9a0aeb65ea9989660a5059104" Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.626987 4684 scope.go:117] "RemoveContainer" containerID="ed6ea14c1bc28ff434ec6da6d45db8e41c2352dfb78e76a97b0af52a2e9e271b" Oct 13 13:56:08 crc kubenswrapper[4684]: E1013 13:56:08.627471 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed6ea14c1bc28ff434ec6da6d45db8e41c2352dfb78e76a97b0af52a2e9e271b\": container with ID starting with ed6ea14c1bc28ff434ec6da6d45db8e41c2352dfb78e76a97b0af52a2e9e271b not found: ID does not exist" containerID="ed6ea14c1bc28ff434ec6da6d45db8e41c2352dfb78e76a97b0af52a2e9e271b" Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.627514 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed6ea14c1bc28ff434ec6da6d45db8e41c2352dfb78e76a97b0af52a2e9e271b"} err="failed to get container status \"ed6ea14c1bc28ff434ec6da6d45db8e41c2352dfb78e76a97b0af52a2e9e271b\": rpc error: code = NotFound desc = could not find container \"ed6ea14c1bc28ff434ec6da6d45db8e41c2352dfb78e76a97b0af52a2e9e271b\": container with ID starting with ed6ea14c1bc28ff434ec6da6d45db8e41c2352dfb78e76a97b0af52a2e9e271b not found: ID does not exist" Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.627542 4684 scope.go:117] "RemoveContainer" containerID="8ab8d99afd288f37579530730e7b884af0bc6a1a5413f4d0bcc5665af358f777" Oct 13 13:56:08 crc kubenswrapper[4684]: E1013 13:56:08.627797 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ab8d99afd288f37579530730e7b884af0bc6a1a5413f4d0bcc5665af358f777\": container with ID starting with 8ab8d99afd288f37579530730e7b884af0bc6a1a5413f4d0bcc5665af358f777 not found: ID does not exist" containerID="8ab8d99afd288f37579530730e7b884af0bc6a1a5413f4d0bcc5665af358f777" Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.627824 4684 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ab8d99afd288f37579530730e7b884af0bc6a1a5413f4d0bcc5665af358f777"} err="failed to get container status \"8ab8d99afd288f37579530730e7b884af0bc6a1a5413f4d0bcc5665af358f777\": rpc error: code = NotFound desc = could not find container \"8ab8d99afd288f37579530730e7b884af0bc6a1a5413f4d0bcc5665af358f777\": container with ID starting with 8ab8d99afd288f37579530730e7b884af0bc6a1a5413f4d0bcc5665af358f777 not found: ID does not exist"
Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.627839 4684 scope.go:117] "RemoveContainer" containerID="a2f385e6867b89bfd93ba7fc70cb576ea74752f9a0aeb65ea9989660a5059104"
Oct 13 13:56:08 crc kubenswrapper[4684]: E1013 13:56:08.628275 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2f385e6867b89bfd93ba7fc70cb576ea74752f9a0aeb65ea9989660a5059104\": container with ID starting with a2f385e6867b89bfd93ba7fc70cb576ea74752f9a0aeb65ea9989660a5059104 not found: ID does not exist" containerID="a2f385e6867b89bfd93ba7fc70cb576ea74752f9a0aeb65ea9989660a5059104"
Oct 13 13:56:08 crc kubenswrapper[4684]: I1013 13:56:08.628302 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2f385e6867b89bfd93ba7fc70cb576ea74752f9a0aeb65ea9989660a5059104"} err="failed to get container status \"a2f385e6867b89bfd93ba7fc70cb576ea74752f9a0aeb65ea9989660a5059104\": rpc error: code = NotFound desc = could not find container \"a2f385e6867b89bfd93ba7fc70cb576ea74752f9a0aeb65ea9989660a5059104\": container with ID starting with a2f385e6867b89bfd93ba7fc70cb576ea74752f9a0aeb65ea9989660a5059104 not found: ID does not exist"
Oct 13 13:56:10 crc kubenswrapper[4684]: I1013 13:56:10.368205 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea40821d-0ccb-40fb-a29b-f8414d2667d7" path="/var/lib/kubelet/pods/ea40821d-0ccb-40fb-a29b-f8414d2667d7/volumes"
Oct 13 13:56:12 crc kubenswrapper[4684]: I1013 13:56:12.350880 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:56:12 crc kubenswrapper[4684]: E1013 13:56:12.351455 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:56:23 crc kubenswrapper[4684]: I1013 13:56:23.351139 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:56:23 crc kubenswrapper[4684]: E1013 13:56:23.352730 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:56:37 crc kubenswrapper[4684]: I1013 13:56:37.351150 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:56:37 crc kubenswrapper[4684]: E1013 13:56:37.352039 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:56:52 crc kubenswrapper[4684]: I1013 13:56:52.350800 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:56:52 crc kubenswrapper[4684]: E1013 13:56:52.351821 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:57:07 crc kubenswrapper[4684]: I1013 13:57:07.351424 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:57:07 crc kubenswrapper[4684]: E1013 13:57:07.352697 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:57:21 crc kubenswrapper[4684]: I1013 13:57:21.351044 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:57:21 crc kubenswrapper[4684]: E1013 13:57:21.353786 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:57:36 crc kubenswrapper[4684]: I1013 13:57:36.364907 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:57:36 crc kubenswrapper[4684]: E1013 13:57:36.365723 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:57:47 crc kubenswrapper[4684]: I1013 13:57:47.351441 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:57:47 crc kubenswrapper[4684]: E1013 13:57:47.352238 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:58:02 crc kubenswrapper[4684]: I1013 13:58:02.351348 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:58:02 crc kubenswrapper[4684]: E1013 13:58:02.352349 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:58:13 crc kubenswrapper[4684]: I1013 13:58:13.350616 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:58:13 crc kubenswrapper[4684]: E1013 13:58:13.351535 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:58:24 crc kubenswrapper[4684]: I1013 13:58:24.350874 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:58:24 crc kubenswrapper[4684]: E1013 13:58:24.351678 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:58:35 crc kubenswrapper[4684]: I1013 13:58:35.350867 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:58:35 crc kubenswrapper[4684]: E1013 13:58:35.351977 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:58:50 crc kubenswrapper[4684]: I1013 13:58:50.351690 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:58:50 crc kubenswrapper[4684]: E1013 13:58:50.352966 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:59:02 crc kubenswrapper[4684]: I1013 13:59:02.350764 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:59:02 crc kubenswrapper[4684]: E1013 13:59:02.353585 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:59:14 crc kubenswrapper[4684]: I1013 13:59:14.352984 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:59:14 crc kubenswrapper[4684]: E1013 13:59:14.354841 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:59:25 crc kubenswrapper[4684]: I1013 13:59:25.350429 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:59:25 crc kubenswrapper[4684]: E1013 13:59:25.351234 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:59:37 crc kubenswrapper[4684]: I1013 13:59:37.350826 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:59:37 crc kubenswrapper[4684]: E1013 13:59:37.351736 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 13:59:52 crc kubenswrapper[4684]: I1013 13:59:52.351108 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 13:59:52 crc kubenswrapper[4684]: E1013 13:59:52.352245 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.180380 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b"]
Oct 13 14:00:00 crc kubenswrapper[4684]: E1013 14:00:00.181480 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea40821d-0ccb-40fb-a29b-f8414d2667d7" containerName="extract-content"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.181500 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea40821d-0ccb-40fb-a29b-f8414d2667d7" containerName="extract-content"
Oct 13 14:00:00 crc kubenswrapper[4684]: E1013 14:00:00.181526 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea40821d-0ccb-40fb-a29b-f8414d2667d7" containerName="registry-server"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.181534 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea40821d-0ccb-40fb-a29b-f8414d2667d7" containerName="registry-server"
Oct 13 14:00:00 crc kubenswrapper[4684]: E1013 14:00:00.181569 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea40821d-0ccb-40fb-a29b-f8414d2667d7" containerName="extract-utilities"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.181579 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea40821d-0ccb-40fb-a29b-f8414d2667d7" containerName="extract-utilities"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.181807 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea40821d-0ccb-40fb-a29b-f8414d2667d7" containerName="registry-server"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.182491 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.185560 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.185785 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.202326 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b"]
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.281080 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trz2k\" (UniqueName: \"kubernetes.io/projected/37c8555e-6ba0-492d-982b-af51d93959c5-kube-api-access-trz2k\") pod \"collect-profiles-29339400-88r2b\" (UID: \"37c8555e-6ba0-492d-982b-af51d93959c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.281276 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/37c8555e-6ba0-492d-982b-af51d93959c5-secret-volume\") pod \"collect-profiles-29339400-88r2b\" (UID: \"37c8555e-6ba0-492d-982b-af51d93959c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.281342 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/37c8555e-6ba0-492d-982b-af51d93959c5-config-volume\") pod \"collect-profiles-29339400-88r2b\" (UID: \"37c8555e-6ba0-492d-982b-af51d93959c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.384023 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trz2k\" (UniqueName: \"kubernetes.io/projected/37c8555e-6ba0-492d-982b-af51d93959c5-kube-api-access-trz2k\") pod \"collect-profiles-29339400-88r2b\" (UID: \"37c8555e-6ba0-492d-982b-af51d93959c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.384724 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/37c8555e-6ba0-492d-982b-af51d93959c5-secret-volume\") pod \"collect-profiles-29339400-88r2b\" (UID: \"37c8555e-6ba0-492d-982b-af51d93959c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.384825 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/37c8555e-6ba0-492d-982b-af51d93959c5-config-volume\") pod \"collect-profiles-29339400-88r2b\" (UID: \"37c8555e-6ba0-492d-982b-af51d93959c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.386886 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/37c8555e-6ba0-492d-982b-af51d93959c5-config-volume\") pod \"collect-profiles-29339400-88r2b\" (UID: \"37c8555e-6ba0-492d-982b-af51d93959c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.393383 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/37c8555e-6ba0-492d-982b-af51d93959c5-secret-volume\") pod \"collect-profiles-29339400-88r2b\" (UID: \"37c8555e-6ba0-492d-982b-af51d93959c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.404510 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trz2k\" (UniqueName: \"kubernetes.io/projected/37c8555e-6ba0-492d-982b-af51d93959c5-kube-api-access-trz2k\") pod \"collect-profiles-29339400-88r2b\" (UID: \"37c8555e-6ba0-492d-982b-af51d93959c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.510222 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b"
Oct 13 14:00:00 crc kubenswrapper[4684]: I1013 14:00:00.971367 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b"]
Oct 13 14:00:01 crc kubenswrapper[4684]: I1013 14:00:01.912166 4684 generic.go:334] "Generic (PLEG): container finished" podID="37c8555e-6ba0-492d-982b-af51d93959c5" containerID="1ed92d4bf7a310521f835ad9678f9b6f7b6ce0307cecab7f8ac9aeeb683e37a2" exitCode=0
Oct 13 14:00:01 crc kubenswrapper[4684]: I1013 14:00:01.912334 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b" event={"ID":"37c8555e-6ba0-492d-982b-af51d93959c5","Type":"ContainerDied","Data":"1ed92d4bf7a310521f835ad9678f9b6f7b6ce0307cecab7f8ac9aeeb683e37a2"}
Oct 13 14:00:01 crc kubenswrapper[4684]: I1013 14:00:01.912511 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b" event={"ID":"37c8555e-6ba0-492d-982b-af51d93959c5","Type":"ContainerStarted","Data":"c3591a8badce0adb95ae95fbae444b0c4286a281a8295d5a74d3ec2da675c00f"}
Oct 13 14:00:03 crc kubenswrapper[4684]: I1013 14:00:03.305620 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b"
Oct 13 14:00:03 crc kubenswrapper[4684]: I1013 14:00:03.341549 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/37c8555e-6ba0-492d-982b-af51d93959c5-secret-volume\") pod \"37c8555e-6ba0-492d-982b-af51d93959c5\" (UID: \"37c8555e-6ba0-492d-982b-af51d93959c5\") "
Oct 13 14:00:03 crc kubenswrapper[4684]: I1013 14:00:03.341630 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/37c8555e-6ba0-492d-982b-af51d93959c5-config-volume\") pod \"37c8555e-6ba0-492d-982b-af51d93959c5\" (UID: \"37c8555e-6ba0-492d-982b-af51d93959c5\") "
Oct 13 14:00:03 crc kubenswrapper[4684]: I1013 14:00:03.342610 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37c8555e-6ba0-492d-982b-af51d93959c5-config-volume" (OuterVolumeSpecName: "config-volume") pod "37c8555e-6ba0-492d-982b-af51d93959c5" (UID: "37c8555e-6ba0-492d-982b-af51d93959c5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 14:00:03 crc kubenswrapper[4684]: I1013 14:00:03.351264 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 14:00:03 crc kubenswrapper[4684]: E1013 14:00:03.351553 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 14:00:03 crc kubenswrapper[4684]: I1013 14:00:03.352000 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37c8555e-6ba0-492d-982b-af51d93959c5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "37c8555e-6ba0-492d-982b-af51d93959c5" (UID: "37c8555e-6ba0-492d-982b-af51d93959c5"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 14:00:03 crc kubenswrapper[4684]: I1013 14:00:03.443210 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trz2k\" (UniqueName: \"kubernetes.io/projected/37c8555e-6ba0-492d-982b-af51d93959c5-kube-api-access-trz2k\") pod \"37c8555e-6ba0-492d-982b-af51d93959c5\" (UID: \"37c8555e-6ba0-492d-982b-af51d93959c5\") "
Oct 13 14:00:03 crc kubenswrapper[4684]: I1013 14:00:03.443968 4684 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/37c8555e-6ba0-492d-982b-af51d93959c5-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 13 14:00:03 crc kubenswrapper[4684]: I1013 14:00:03.443994 4684 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/37c8555e-6ba0-492d-982b-af51d93959c5-config-volume\") on node \"crc\" DevicePath \"\""
Oct 13 14:00:03 crc kubenswrapper[4684]: I1013 14:00:03.448605 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37c8555e-6ba0-492d-982b-af51d93959c5-kube-api-access-trz2k" (OuterVolumeSpecName: "kube-api-access-trz2k") pod "37c8555e-6ba0-492d-982b-af51d93959c5" (UID: "37c8555e-6ba0-492d-982b-af51d93959c5"). InnerVolumeSpecName "kube-api-access-trz2k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 14:00:03 crc kubenswrapper[4684]: I1013 14:00:03.545827 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trz2k\" (UniqueName: \"kubernetes.io/projected/37c8555e-6ba0-492d-982b-af51d93959c5-kube-api-access-trz2k\") on node \"crc\" DevicePath \"\""
Oct 13 14:00:03 crc kubenswrapper[4684]: I1013 14:00:03.936543 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b" event={"ID":"37c8555e-6ba0-492d-982b-af51d93959c5","Type":"ContainerDied","Data":"c3591a8badce0adb95ae95fbae444b0c4286a281a8295d5a74d3ec2da675c00f"}
Oct 13 14:00:03 crc kubenswrapper[4684]: I1013 14:00:03.936797 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3591a8badce0adb95ae95fbae444b0c4286a281a8295d5a74d3ec2da675c00f"
Oct 13 14:00:03 crc kubenswrapper[4684]: I1013 14:00:03.936640 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339400-88r2b"
Oct 13 14:00:04 crc kubenswrapper[4684]: I1013 14:00:04.413174 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"]
Oct 13 14:00:04 crc kubenswrapper[4684]: I1013 14:00:04.421260 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339355-jtjhk"]
Oct 13 14:00:06 crc kubenswrapper[4684]: I1013 14:00:06.363840 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14b78ff6-21ce-4b5b-a6bf-915e28c2956b" path="/var/lib/kubelet/pods/14b78ff6-21ce-4b5b-a6bf-915e28c2956b/volumes"
Oct 13 14:00:14 crc kubenswrapper[4684]: I1013 14:00:14.350792 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 14:00:14 crc kubenswrapper[4684]: E1013 14:00:14.351560 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 14:00:26 crc kubenswrapper[4684]: I1013 14:00:26.362543 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 14:00:26 crc kubenswrapper[4684]: E1013 14:00:26.363489 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 14:00:40 crc kubenswrapper[4684]: I1013 14:00:40.350658 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 14:00:41 crc kubenswrapper[4684]: I1013 14:00:41.323167 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"8f039b205cc277f559160266927df66f1bc5c5103396973c169e80ddca9064c9"}
Oct 13 14:00:50 crc kubenswrapper[4684]: I1013 14:00:50.872983 4684 scope.go:117] "RemoveContainer" containerID="d98b9e339e90cd06a6a2cb70490f05968117d1e8d9e6ecc8aaf857de851d294e"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.179524 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29339401-8sg44"]
Oct 13 14:01:00 crc kubenswrapper[4684]: E1013 14:01:00.180829 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37c8555e-6ba0-492d-982b-af51d93959c5" containerName="collect-profiles"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.180861 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="37c8555e-6ba0-492d-982b-af51d93959c5" containerName="collect-profiles"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.181471 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="37c8555e-6ba0-492d-982b-af51d93959c5" containerName="collect-profiles"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.182963 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29339401-8sg44"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.191404 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29339401-8sg44"]
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.297284 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-combined-ca-bundle\") pod \"keystone-cron-29339401-8sg44\" (UID: \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\") " pod="openstack/keystone-cron-29339401-8sg44"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.297668 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-config-data\") pod \"keystone-cron-29339401-8sg44\" (UID: \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\") " pod="openstack/keystone-cron-29339401-8sg44"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.297713 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-fernet-keys\") pod \"keystone-cron-29339401-8sg44\" (UID: \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\") " pod="openstack/keystone-cron-29339401-8sg44"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.298023 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jclkq\" (UniqueName: \"kubernetes.io/projected/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-kube-api-access-jclkq\") pod \"keystone-cron-29339401-8sg44\" (UID: \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\") " pod="openstack/keystone-cron-29339401-8sg44"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.400073 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-config-data\") pod \"keystone-cron-29339401-8sg44\" (UID: \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\") " pod="openstack/keystone-cron-29339401-8sg44"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.400127 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-fernet-keys\") pod \"keystone-cron-29339401-8sg44\" (UID: \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\") " pod="openstack/keystone-cron-29339401-8sg44"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.400227 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jclkq\" (UniqueName: \"kubernetes.io/projected/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-kube-api-access-jclkq\") pod \"keystone-cron-29339401-8sg44\" (UID: \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\") " pod="openstack/keystone-cron-29339401-8sg44"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.400299 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-combined-ca-bundle\") pod \"keystone-cron-29339401-8sg44\" (UID: \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\") " pod="openstack/keystone-cron-29339401-8sg44"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.408052 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-combined-ca-bundle\") pod \"keystone-cron-29339401-8sg44\" (UID: \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\") " pod="openstack/keystone-cron-29339401-8sg44"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.408195 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-config-data\") pod \"keystone-cron-29339401-8sg44\" (UID: \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\") " pod="openstack/keystone-cron-29339401-8sg44"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.408276 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-fernet-keys\") pod \"keystone-cron-29339401-8sg44\" (UID: \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\") " pod="openstack/keystone-cron-29339401-8sg44"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.431601 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jclkq\" (UniqueName: \"kubernetes.io/projected/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-kube-api-access-jclkq\") pod \"keystone-cron-29339401-8sg44\" (UID: \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\") " pod="openstack/keystone-cron-29339401-8sg44"
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.507929 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29339401-8sg44"
Oct 13 14:01:00 crc kubenswrapper[4684]: W1013 14:01:00.956784 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc58f1c18_97fd_4e58_a0e4_9bd84740c0f5.slice/crio-214762146dd0c1727643f93ae88490fd8c641a3faee17a0e64022ad1aed497be WatchSource:0}: Error finding container 214762146dd0c1727643f93ae88490fd8c641a3faee17a0e64022ad1aed497be: Status 404 returned error can't find the container with id 214762146dd0c1727643f93ae88490fd8c641a3faee17a0e64022ad1aed497be
Oct 13 14:01:00 crc kubenswrapper[4684]: I1013 14:01:00.957257 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29339401-8sg44"]
Oct 13 14:01:01 crc kubenswrapper[4684]: I1013 14:01:01.546786 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29339401-8sg44" event={"ID":"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5","Type":"ContainerStarted","Data":"7812e5448ce7e788dc95c84bc9c75d69a4fadc0457feb094fbcd9c297af6090f"}
Oct 13 14:01:01 crc kubenswrapper[4684]: I1013 14:01:01.547223 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29339401-8sg44" event={"ID":"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5","Type":"ContainerStarted","Data":"214762146dd0c1727643f93ae88490fd8c641a3faee17a0e64022ad1aed497be"}
Oct 13 14:01:01 crc kubenswrapper[4684]: I1013 14:01:01.579209 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29339401-8sg44" podStartSLOduration=1.5791849500000001 podStartE2EDuration="1.57918495s" podCreationTimestamp="2025-10-13 14:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 14:01:01.570081626 +0000 UTC m=+3216.137465706" watchObservedRunningTime="2025-10-13 14:01:01.57918495 +0000 UTC m=+3216.146569030"
Oct 13 14:01:03 crc kubenswrapper[4684]: I1013 14:01:03.578476 4684 generic.go:334] "Generic (PLEG): container finished" podID="c58f1c18-97fd-4e58-a0e4-9bd84740c0f5" containerID="7812e5448ce7e788dc95c84bc9c75d69a4fadc0457feb094fbcd9c297af6090f" exitCode=0
Oct 13 14:01:03 crc kubenswrapper[4684]: I1013 14:01:03.578509 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29339401-8sg44" event={"ID":"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5","Type":"ContainerDied","Data":"7812e5448ce7e788dc95c84bc9c75d69a4fadc0457feb094fbcd9c297af6090f"}
Oct 13 14:01:04 crc kubenswrapper[4684]: I1013 14:01:04.978550 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29339401-8sg44"
Oct 13 14:01:05 crc kubenswrapper[4684]: I1013 14:01:05.000150 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-config-data\") pod \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\" (UID: \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\") "
Oct 13 14:01:05 crc kubenswrapper[4684]: I1013 14:01:05.000212 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-fernet-keys\") pod \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\" (UID: \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\") "
Oct 13 14:01:05 crc kubenswrapper[4684]: I1013 14:01:05.000292 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jclkq\" (UniqueName: \"kubernetes.io/projected/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-kube-api-access-jclkq\") pod \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\" (UID: \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\") "
Oct 13 14:01:05 crc kubenswrapper[4684]: I1013 14:01:05.000323 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-combined-ca-bundle\") pod \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\" (UID: \"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5\") "
Oct 13 14:01:05 crc kubenswrapper[4684]: I1013 14:01:05.006501 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "c58f1c18-97fd-4e58-a0e4-9bd84740c0f5" (UID: "c58f1c18-97fd-4e58-a0e4-9bd84740c0f5"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 14:01:05 crc kubenswrapper[4684]: I1013 14:01:05.019327 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-kube-api-access-jclkq" (OuterVolumeSpecName: "kube-api-access-jclkq") pod "c58f1c18-97fd-4e58-a0e4-9bd84740c0f5" (UID: "c58f1c18-97fd-4e58-a0e4-9bd84740c0f5"). InnerVolumeSpecName "kube-api-access-jclkq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 14:01:05 crc kubenswrapper[4684]: I1013 14:01:05.046711 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c58f1c18-97fd-4e58-a0e4-9bd84740c0f5" (UID: "c58f1c18-97fd-4e58-a0e4-9bd84740c0f5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 14:01:05 crc kubenswrapper[4684]: I1013 14:01:05.080666 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-config-data" (OuterVolumeSpecName: "config-data") pod "c58f1c18-97fd-4e58-a0e4-9bd84740c0f5" (UID: "c58f1c18-97fd-4e58-a0e4-9bd84740c0f5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 14:01:05 crc kubenswrapper[4684]: I1013 14:01:05.102114 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jclkq\" (UniqueName: \"kubernetes.io/projected/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-kube-api-access-jclkq\") on node \"crc\" DevicePath \"\""
Oct 13 14:01:05 crc kubenswrapper[4684]: I1013 14:01:05.102153 4684 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 14:01:05 crc kubenswrapper[4684]: I1013 14:01:05.102167 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 14:01:05 crc kubenswrapper[4684]: I1013 14:01:05.102181 4684 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c58f1c18-97fd-4e58-a0e4-9bd84740c0f5-fernet-keys\") on node \"crc\" DevicePath \"\""
Oct 13 14:01:05 crc kubenswrapper[4684]: I1013 14:01:05.599066 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29339401-8sg44" event={"ID":"c58f1c18-97fd-4e58-a0e4-9bd84740c0f5","Type":"ContainerDied","Data":"214762146dd0c1727643f93ae88490fd8c641a3faee17a0e64022ad1aed497be"}
Oct 13 14:01:05 crc kubenswrapper[4684]: I1013 14:01:05.599109 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="214762146dd0c1727643f93ae88490fd8c641a3faee17a0e64022ad1aed497be"
Oct 13 14:01:05 crc kubenswrapper[4684]: I1013 14:01:05.599143 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29339401-8sg44"
Oct 13 14:03:00 crc kubenswrapper[4684]: I1013 14:03:00.560337 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 14:03:00 crc kubenswrapper[4684]: I1013 14:03:00.561133 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 14:03:30 crc kubenswrapper[4684]: I1013 14:03:30.560146 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 14:03:30 crc kubenswrapper[4684]: I1013 14:03:30.560779 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 14:03:39 crc kubenswrapper[4684]: I1013 14:03:39.529511 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-886j2"]
Oct 13 14:03:39 crc kubenswrapper[4684]: E1013 14:03:39.532653 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c58f1c18-97fd-4e58-a0e4-9bd84740c0f5" containerName="keystone-cron"
Oct 13 14:03:39 crc kubenswrapper[4684]: I1013 14:03:39.532759 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="c58f1c18-97fd-4e58-a0e4-9bd84740c0f5" containerName="keystone-cron"
Oct 13 14:03:39 crc kubenswrapper[4684]: I1013 14:03:39.533493 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="c58f1c18-97fd-4e58-a0e4-9bd84740c0f5" containerName="keystone-cron"
Oct 13 14:03:39 crc kubenswrapper[4684]: I1013 14:03:39.535274 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:39 crc kubenswrapper[4684]: I1013 14:03:39.548855 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-886j2"]
Oct 13 14:03:39 crc kubenswrapper[4684]: I1013 14:03:39.641589 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6b8k4\" (UniqueName: \"kubernetes.io/projected/06a2a426-baf3-4998-9d6e-91ca324b69f3-kube-api-access-6b8k4\") pod \"community-operators-886j2\" (UID: \"06a2a426-baf3-4998-9d6e-91ca324b69f3\") " pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:39 crc kubenswrapper[4684]: I1013 14:03:39.641682 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06a2a426-baf3-4998-9d6e-91ca324b69f3-utilities\") pod \"community-operators-886j2\" (UID: \"06a2a426-baf3-4998-9d6e-91ca324b69f3\") " pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:39 crc kubenswrapper[4684]: I1013 14:03:39.641740 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06a2a426-baf3-4998-9d6e-91ca324b69f3-catalog-content\") pod \"community-operators-886j2\" (UID: \"06a2a426-baf3-4998-9d6e-91ca324b69f3\") " pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:39 crc kubenswrapper[4684]: I1013 14:03:39.753929 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6b8k4\" (UniqueName: \"kubernetes.io/projected/06a2a426-baf3-4998-9d6e-91ca324b69f3-kube-api-access-6b8k4\") pod \"community-operators-886j2\" (UID: \"06a2a426-baf3-4998-9d6e-91ca324b69f3\") " pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:39 crc kubenswrapper[4684]: I1013 14:03:39.754047 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06a2a426-baf3-4998-9d6e-91ca324b69f3-utilities\") pod \"community-operators-886j2\" (UID: \"06a2a426-baf3-4998-9d6e-91ca324b69f3\") " pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:39 crc kubenswrapper[4684]: I1013 14:03:39.754142 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06a2a426-baf3-4998-9d6e-91ca324b69f3-catalog-content\") pod \"community-operators-886j2\" (UID: \"06a2a426-baf3-4998-9d6e-91ca324b69f3\") " pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:39 crc kubenswrapper[4684]: I1013 14:03:39.754802 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06a2a426-baf3-4998-9d6e-91ca324b69f3-catalog-content\") pod \"community-operators-886j2\" (UID: \"06a2a426-baf3-4998-9d6e-91ca324b69f3\") " pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:39 crc kubenswrapper[4684]: I1013 14:03:39.755561 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06a2a426-baf3-4998-9d6e-91ca324b69f3-utilities\") pod \"community-operators-886j2\" (UID: \"06a2a426-baf3-4998-9d6e-91ca324b69f3\") " pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:39 crc kubenswrapper[4684]: I1013 14:03:39.780664 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6b8k4\" (UniqueName: \"kubernetes.io/projected/06a2a426-baf3-4998-9d6e-91ca324b69f3-kube-api-access-6b8k4\") pod \"community-operators-886j2\" (UID: \"06a2a426-baf3-4998-9d6e-91ca324b69f3\") " pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:39 crc kubenswrapper[4684]: I1013 14:03:39.855277 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:40 crc kubenswrapper[4684]: I1013 14:03:40.433828 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-886j2"]
Oct 13 14:03:41 crc kubenswrapper[4684]: I1013 14:03:41.143877 4684 generic.go:334] "Generic (PLEG): container finished" podID="06a2a426-baf3-4998-9d6e-91ca324b69f3" containerID="8bbac643a9c96724860d2660ee7372da9af4226a99b3cff31434c692355f0072" exitCode=0
Oct 13 14:03:41 crc kubenswrapper[4684]: I1013 14:03:41.144280 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-886j2" event={"ID":"06a2a426-baf3-4998-9d6e-91ca324b69f3","Type":"ContainerDied","Data":"8bbac643a9c96724860d2660ee7372da9af4226a99b3cff31434c692355f0072"}
Oct 13 14:03:41 crc kubenswrapper[4684]: I1013 14:03:41.144310 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-886j2" event={"ID":"06a2a426-baf3-4998-9d6e-91ca324b69f3","Type":"ContainerStarted","Data":"35f5e136265ea14dd8e28b14eb6033d92a230e945f89a80949462fa41b1f8bfb"}
Oct 13 14:03:41 crc kubenswrapper[4684]: I1013 14:03:41.146087 4684 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 13 14:03:42 crc kubenswrapper[4684]: I1013 14:03:42.154619 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-886j2" event={"ID":"06a2a426-baf3-4998-9d6e-91ca324b69f3","Type":"ContainerStarted","Data":"48374d691225b5e8faa2eb3b062bb05d0d5fcb5a742dd83433643cecd6ba37d6"}
Oct 13 14:03:44 crc kubenswrapper[4684]: I1013 14:03:44.175449 4684 generic.go:334] "Generic (PLEG): container finished" podID="06a2a426-baf3-4998-9d6e-91ca324b69f3" containerID="48374d691225b5e8faa2eb3b062bb05d0d5fcb5a742dd83433643cecd6ba37d6" exitCode=0
Oct 13 14:03:44 crc kubenswrapper[4684]: I1013 14:03:44.175833 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-886j2" event={"ID":"06a2a426-baf3-4998-9d6e-91ca324b69f3","Type":"ContainerDied","Data":"48374d691225b5e8faa2eb3b062bb05d0d5fcb5a742dd83433643cecd6ba37d6"}
Oct 13 14:03:45 crc kubenswrapper[4684]: I1013 14:03:45.188499 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-886j2" event={"ID":"06a2a426-baf3-4998-9d6e-91ca324b69f3","Type":"ContainerStarted","Data":"e1dbcf9f769e067f7154005eec18e03e7e76d88cac19ebe5fd7dd8dfc0fbf4d3"}
Oct 13 14:03:45 crc kubenswrapper[4684]: I1013 14:03:45.215398 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-886j2" podStartSLOduration=2.5994891129999997 podStartE2EDuration="6.215379803s" podCreationTimestamp="2025-10-13 14:03:39 +0000 UTC" firstStartedPulling="2025-10-13 14:03:41.145829196 +0000 UTC m=+3375.713213266" lastFinishedPulling="2025-10-13 14:03:44.761719876 +0000 UTC m=+3379.329103956" observedRunningTime="2025-10-13 14:03:45.209642553 +0000 UTC m=+3379.777026633" watchObservedRunningTime="2025-10-13 14:03:45.215379803 +0000 UTC m=+3379.782763873"
Oct 13 14:03:49 crc kubenswrapper[4684]: I1013 14:03:49.855626 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:49 crc kubenswrapper[4684]: I1013 14:03:49.856396 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:49 crc kubenswrapper[4684]: I1013 14:03:49.910778 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:50 crc kubenswrapper[4684]: I1013 14:03:50.287705 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:50 crc kubenswrapper[4684]: I1013 14:03:50.359929 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-886j2"]
Oct 13 14:03:52 crc kubenswrapper[4684]: I1013 14:03:52.253122 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-886j2" podUID="06a2a426-baf3-4998-9d6e-91ca324b69f3" containerName="registry-server" containerID="cri-o://e1dbcf9f769e067f7154005eec18e03e7e76d88cac19ebe5fd7dd8dfc0fbf4d3" gracePeriod=2
Oct 13 14:03:52 crc kubenswrapper[4684]: I1013 14:03:52.787046 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:52 crc kubenswrapper[4684]: I1013 14:03:52.900405 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06a2a426-baf3-4998-9d6e-91ca324b69f3-catalog-content\") pod \"06a2a426-baf3-4998-9d6e-91ca324b69f3\" (UID: \"06a2a426-baf3-4998-9d6e-91ca324b69f3\") "
Oct 13 14:03:52 crc kubenswrapper[4684]: I1013 14:03:52.900478 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06a2a426-baf3-4998-9d6e-91ca324b69f3-utilities\") pod \"06a2a426-baf3-4998-9d6e-91ca324b69f3\" (UID: \"06a2a426-baf3-4998-9d6e-91ca324b69f3\") "
Oct 13 14:03:52 crc kubenswrapper[4684]: I1013 14:03:52.900696 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6b8k4\" (UniqueName: \"kubernetes.io/projected/06a2a426-baf3-4998-9d6e-91ca324b69f3-kube-api-access-6b8k4\") pod \"06a2a426-baf3-4998-9d6e-91ca324b69f3\" (UID: \"06a2a426-baf3-4998-9d6e-91ca324b69f3\") "
Oct 13 14:03:52 crc kubenswrapper[4684]: I1013 14:03:52.901528 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06a2a426-baf3-4998-9d6e-91ca324b69f3-utilities" (OuterVolumeSpecName: "utilities") pod "06a2a426-baf3-4998-9d6e-91ca324b69f3" (UID: "06a2a426-baf3-4998-9d6e-91ca324b69f3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 14:03:52 crc kubenswrapper[4684]: I1013 14:03:52.907136 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06a2a426-baf3-4998-9d6e-91ca324b69f3-kube-api-access-6b8k4" (OuterVolumeSpecName: "kube-api-access-6b8k4") pod "06a2a426-baf3-4998-9d6e-91ca324b69f3" (UID: "06a2a426-baf3-4998-9d6e-91ca324b69f3"). InnerVolumeSpecName "kube-api-access-6b8k4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 14:03:52 crc kubenswrapper[4684]: I1013 14:03:52.959642 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06a2a426-baf3-4998-9d6e-91ca324b69f3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "06a2a426-baf3-4998-9d6e-91ca324b69f3" (UID: "06a2a426-baf3-4998-9d6e-91ca324b69f3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.003356 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6b8k4\" (UniqueName: \"kubernetes.io/projected/06a2a426-baf3-4998-9d6e-91ca324b69f3-kube-api-access-6b8k4\") on node \"crc\" DevicePath \"\""
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.003399 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06a2a426-baf3-4998-9d6e-91ca324b69f3-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.003415 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06a2a426-baf3-4998-9d6e-91ca324b69f3-utilities\") on node \"crc\" DevicePath \"\""
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.268980 4684 generic.go:334] "Generic (PLEG): container finished" podID="06a2a426-baf3-4998-9d6e-91ca324b69f3" containerID="e1dbcf9f769e067f7154005eec18e03e7e76d88cac19ebe5fd7dd8dfc0fbf4d3" exitCode=0
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.269044 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-886j2" event={"ID":"06a2a426-baf3-4998-9d6e-91ca324b69f3","Type":"ContainerDied","Data":"e1dbcf9f769e067f7154005eec18e03e7e76d88cac19ebe5fd7dd8dfc0fbf4d3"}
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.269091 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-886j2" event={"ID":"06a2a426-baf3-4998-9d6e-91ca324b69f3","Type":"ContainerDied","Data":"35f5e136265ea14dd8e28b14eb6033d92a230e945f89a80949462fa41b1f8bfb"}
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.269119 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-886j2"
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.269119 4684 scope.go:117] "RemoveContainer" containerID="e1dbcf9f769e067f7154005eec18e03e7e76d88cac19ebe5fd7dd8dfc0fbf4d3"
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.300468 4684 scope.go:117] "RemoveContainer" containerID="48374d691225b5e8faa2eb3b062bb05d0d5fcb5a742dd83433643cecd6ba37d6"
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.325302 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-886j2"]
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.339363 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-886j2"]
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.356114 4684 scope.go:117] "RemoveContainer" containerID="8bbac643a9c96724860d2660ee7372da9af4226a99b3cff31434c692355f0072"
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.392647 4684 scope.go:117] "RemoveContainer" containerID="e1dbcf9f769e067f7154005eec18e03e7e76d88cac19ebe5fd7dd8dfc0fbf4d3"
Oct 13 14:03:53 crc kubenswrapper[4684]: E1013 14:03:53.393341 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1dbcf9f769e067f7154005eec18e03e7e76d88cac19ebe5fd7dd8dfc0fbf4d3\": container with ID starting with e1dbcf9f769e067f7154005eec18e03e7e76d88cac19ebe5fd7dd8dfc0fbf4d3 not found: ID does not exist" containerID="e1dbcf9f769e067f7154005eec18e03e7e76d88cac19ebe5fd7dd8dfc0fbf4d3"
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.393376 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1dbcf9f769e067f7154005eec18e03e7e76d88cac19ebe5fd7dd8dfc0fbf4d3"} err="failed to get container status \"e1dbcf9f769e067f7154005eec18e03e7e76d88cac19ebe5fd7dd8dfc0fbf4d3\": rpc error: code = NotFound desc = could not find container \"e1dbcf9f769e067f7154005eec18e03e7e76d88cac19ebe5fd7dd8dfc0fbf4d3\": container with ID starting with e1dbcf9f769e067f7154005eec18e03e7e76d88cac19ebe5fd7dd8dfc0fbf4d3 not found: ID does not exist"
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.393403 4684 scope.go:117] "RemoveContainer" containerID="48374d691225b5e8faa2eb3b062bb05d0d5fcb5a742dd83433643cecd6ba37d6"
Oct 13 14:03:53 crc kubenswrapper[4684]: E1013 14:03:53.393846 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48374d691225b5e8faa2eb3b062bb05d0d5fcb5a742dd83433643cecd6ba37d6\": container with ID starting with 48374d691225b5e8faa2eb3b062bb05d0d5fcb5a742dd83433643cecd6ba37d6 not found: ID does not exist" containerID="48374d691225b5e8faa2eb3b062bb05d0d5fcb5a742dd83433643cecd6ba37d6"
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.393876 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48374d691225b5e8faa2eb3b062bb05d0d5fcb5a742dd83433643cecd6ba37d6"} err="failed to get container status \"48374d691225b5e8faa2eb3b062bb05d0d5fcb5a742dd83433643cecd6ba37d6\": rpc error: code = NotFound desc = could not find container \"48374d691225b5e8faa2eb3b062bb05d0d5fcb5a742dd83433643cecd6ba37d6\": container with ID starting with 48374d691225b5e8faa2eb3b062bb05d0d5fcb5a742dd83433643cecd6ba37d6 not found: ID does not exist"
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.393893 4684 scope.go:117] "RemoveContainer" containerID="8bbac643a9c96724860d2660ee7372da9af4226a99b3cff31434c692355f0072"
Oct 13 14:03:53 crc kubenswrapper[4684]: E1013 14:03:53.394574 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bbac643a9c96724860d2660ee7372da9af4226a99b3cff31434c692355f0072\": container with ID starting with 8bbac643a9c96724860d2660ee7372da9af4226a99b3cff31434c692355f0072 not found: ID does not exist" containerID="8bbac643a9c96724860d2660ee7372da9af4226a99b3cff31434c692355f0072"
Oct 13 14:03:53 crc kubenswrapper[4684]: I1013 14:03:53.394605 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bbac643a9c96724860d2660ee7372da9af4226a99b3cff31434c692355f0072"} err="failed to get container status \"8bbac643a9c96724860d2660ee7372da9af4226a99b3cff31434c692355f0072\": rpc error: code = NotFound desc = could not find container \"8bbac643a9c96724860d2660ee7372da9af4226a99b3cff31434c692355f0072\": container with ID starting with 8bbac643a9c96724860d2660ee7372da9af4226a99b3cff31434c692355f0072 not found: ID does not exist"
Oct 13 14:03:54 crc kubenswrapper[4684]: I1013 14:03:54.361624 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06a2a426-baf3-4998-9d6e-91ca324b69f3" path="/var/lib/kubelet/pods/06a2a426-baf3-4998-9d6e-91ca324b69f3/volumes"
Oct 13 14:04:00 crc kubenswrapper[4684]: I1013 14:04:00.560529 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 14:04:00 crc kubenswrapper[4684]: I1013 14:04:00.561302 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 14:04:00 crc kubenswrapper[4684]: I1013 14:04:00.561376 4684 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wns5s"
Oct 13 14:04:00 crc kubenswrapper[4684]: I1013 14:04:00.562575 4684 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8f039b205cc277f559160266927df66f1bc5c5103396973c169e80ddca9064c9"} pod="openshift-machine-config-operator/machine-config-daemon-wns5s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 13 14:04:00 crc kubenswrapper[4684]: I1013 14:04:00.562688 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" containerID="cri-o://8f039b205cc277f559160266927df66f1bc5c5103396973c169e80ddca9064c9" gracePeriod=600
Oct 13 14:04:01 crc kubenswrapper[4684]: I1013 14:04:01.350804 4684 generic.go:334] "Generic (PLEG): container finished" podID="e54ad64a-6df7-4082-afde-d56463121b3f" containerID="8f039b205cc277f559160266927df66f1bc5c5103396973c169e80ddca9064c9" exitCode=0
Oct 13 14:04:01 crc kubenswrapper[4684]: I1013 14:04:01.350897 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerDied","Data":"8f039b205cc277f559160266927df66f1bc5c5103396973c169e80ddca9064c9"}
Oct 13 14:04:01 crc kubenswrapper[4684]: I1013 14:04:01.351233 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7"}
Oct 13 14:04:01 crc kubenswrapper[4684]: I1013 14:04:01.351260 4684 scope.go:117] "RemoveContainer" containerID="a7c5d93126aaebdd4848a2ab79d485c7300e9fc731ea7f0830c930b86ae0d6de"
Oct 13 14:04:27 crc kubenswrapper[4684]: I1013 14:04:27.653653 4684 generic.go:334] "Generic (PLEG): container finished" podID="cba49bf6-7402-47c1-bc2d-fc49dc6b0e30" containerID="2dd0ef3183ef150c4b8588ed1177ecb3b8355ec3221e1acc8b94621d4f48776c" exitCode=0
Oct 13 14:04:27 crc kubenswrapper[4684]: I1013 14:04:27.654399 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30","Type":"ContainerDied","Data":"2dd0ef3183ef150c4b8588ed1177ecb3b8355ec3221e1acc8b94621d4f48776c"}
Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.054471 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.163996 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvvpl\" (UniqueName: \"kubernetes.io/projected/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-kube-api-access-hvvpl\") pod \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") "
Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.164125 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-ssh-key\") pod \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") "
Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.164227 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") "
Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.164263 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-config-data\") pod \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") "
Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.164292 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-openstack-config\") pod \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") "
Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.164357 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-ca-certs\") pod \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\")
" Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.164455 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-test-operator-ephemeral-workdir\") pod \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.164480 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-test-operator-ephemeral-temporary\") pod \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.164519 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-openstack-config-secret\") pod \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\" (UID: \"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30\") " Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.165249 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30" (UID: "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.165538 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-config-data" (OuterVolumeSpecName: "config-data") pod "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30" (UID: "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.169861 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-kube-api-access-hvvpl" (OuterVolumeSpecName: "kube-api-access-hvvpl") pod "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30" (UID: "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30"). InnerVolumeSpecName "kube-api-access-hvvpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.169983 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30" (UID: "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.170506 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "test-operator-logs") pod "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30" (UID: "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30"). InnerVolumeSpecName "local-storage02-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.196149 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30" (UID: "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.196584 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30" (UID: "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.198317 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30" (UID: "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.215146 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30" (UID: "cba49bf6-7402-47c1-bc2d-fc49dc6b0e30"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.266627 4684 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.266672 4684 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.266687 4684 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.266700 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvvpl\" (UniqueName: \"kubernetes.io/projected/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-kube-api-access-hvvpl\") on node \"crc\" DevicePath \"\"" Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.266713 4684 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.266754 4684 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.266767 4684 
Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.266767 4684 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.266779 4684 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-openstack-config\") on node \"crc\" DevicePath \"\""
Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.266791 4684 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/cba49bf6-7402-47c1-bc2d-fc49dc6b0e30-ca-certs\") on node \"crc\" DevicePath \"\""
Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.294541 4684 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc"
Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.368715 4684 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\""
Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.685263 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"cba49bf6-7402-47c1-bc2d-fc49dc6b0e30","Type":"ContainerDied","Data":"1ec1513527277b10deb949a0072b026fc2e627f1b809a1e1496e51b48016b5e0"}
Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.685312 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ec1513527277b10deb949a0072b026fc2e627f1b809a1e1496e51b48016b5e0"
Oct 13 14:04:29 crc kubenswrapper[4684]: I1013 14:04:29.685363 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.072420 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Oct 13 14:04:38 crc kubenswrapper[4684]: E1013 14:04:38.073819 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cba49bf6-7402-47c1-bc2d-fc49dc6b0e30" containerName="tempest-tests-tempest-tests-runner"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.073841 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="cba49bf6-7402-47c1-bc2d-fc49dc6b0e30" containerName="tempest-tests-tempest-tests-runner"
Oct 13 14:04:38 crc kubenswrapper[4684]: E1013 14:04:38.073876 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a2a426-baf3-4998-9d6e-91ca324b69f3" containerName="extract-utilities"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.073889 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a2a426-baf3-4998-9d6e-91ca324b69f3" containerName="extract-utilities"
Oct 13 14:04:38 crc kubenswrapper[4684]: E1013 14:04:38.073945 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a2a426-baf3-4998-9d6e-91ca324b69f3" containerName="registry-server"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.073956 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a2a426-baf3-4998-9d6e-91ca324b69f3" containerName="registry-server"
Oct 13 14:04:38 crc kubenswrapper[4684]: E1013 14:04:38.073983 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a2a426-baf3-4998-9d6e-91ca324b69f3" containerName="extract-content"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.073996 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a2a426-baf3-4998-9d6e-91ca324b69f3" containerName="extract-content"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.074295 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="06a2a426-baf3-4998-9d6e-91ca324b69f3" containerName="registry-server"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.074334 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="cba49bf6-7402-47c1-bc2d-fc49dc6b0e30" containerName="tempest-tests-tempest-tests-runner"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.075355 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.078828 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-wp95n"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.084478 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.089468 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"77c83104-d58a-4883-ad94-dfdca3feb2d7\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.090135 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgmh5\" (UniqueName: \"kubernetes.io/projected/77c83104-d58a-4883-ad94-dfdca3feb2d7-kube-api-access-qgmh5\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"77c83104-d58a-4883-ad94-dfdca3feb2d7\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.195561 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"77c83104-d58a-4883-ad94-dfdca3feb2d7\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.195718 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgmh5\" (UniqueName: \"kubernetes.io/projected/77c83104-d58a-4883-ad94-dfdca3feb2d7-kube-api-access-qgmh5\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"77c83104-d58a-4883-ad94-dfdca3feb2d7\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.196167 4684 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"77c83104-d58a-4883-ad94-dfdca3feb2d7\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.225152 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgmh5\" (UniqueName: \"kubernetes.io/projected/77c83104-d58a-4883-ad94-dfdca3feb2d7-kube-api-access-qgmh5\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"77c83104-d58a-4883-ad94-dfdca3feb2d7\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.228707 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"77c83104-d58a-4883-ad94-dfdca3feb2d7\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.435332 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 14:04:38 crc kubenswrapper[4684]: I1013 14:04:38.883745 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Oct 13 14:04:39 crc kubenswrapper[4684]: I1013 14:04:39.797284 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"77c83104-d58a-4883-ad94-dfdca3feb2d7","Type":"ContainerStarted","Data":"78af5b84b5b5d648734210e8233997f6fa891786bd2793c00f01e2201e7da9ef"}
Oct 13 14:04:40 crc kubenswrapper[4684]: I1013 14:04:40.811727 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"77c83104-d58a-4883-ad94-dfdca3feb2d7","Type":"ContainerStarted","Data":"52df2e3f2afb9afa9198e08b0fc9a370b4c8703b8155d4bf88501c67ca7951bb"}
Oct 13 14:04:40 crc kubenswrapper[4684]: I1013 14:04:40.840861 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.8827206250000001 podStartE2EDuration="2.840829985s" podCreationTimestamp="2025-10-13 14:04:38 +0000 UTC" firstStartedPulling="2025-10-13 14:04:38.895344475 +0000 UTC m=+3433.462728585" lastFinishedPulling="2025-10-13 14:04:39.853453875 +0000 UTC m=+3434.420837945" observedRunningTime="2025-10-13 14:04:40.825567405 +0000 UTC m=+3435.392951535" watchObservedRunningTime="2025-10-13 14:04:40.840829985 +0000 UTC m=+3435.408214095"
Oct 13 14:04:57 crc kubenswrapper[4684]: I1013 14:04:57.793995 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nlmfj/must-gather-8lvtv"]
Oct 13 14:04:57 crc kubenswrapper[4684]: I1013 14:04:57.796644 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlmfj/must-gather-8lvtv"
Oct 13 14:04:57 crc kubenswrapper[4684]: I1013 14:04:57.798963 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-nlmfj"/"default-dockercfg-mspvv"
Oct 13 14:04:57 crc kubenswrapper[4684]: I1013 14:04:57.799168 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-nlmfj"/"kube-root-ca.crt"
Oct 13 14:04:57 crc kubenswrapper[4684]: I1013 14:04:57.804093 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-nlmfj/must-gather-8lvtv"]
Oct 13 14:04:57 crc kubenswrapper[4684]: I1013 14:04:57.805620 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-nlmfj"/"openshift-service-ca.crt"
Oct 13 14:04:57 crc kubenswrapper[4684]: I1013 14:04:57.920069 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rftk\" (UniqueName: \"kubernetes.io/projected/da586989-4b48-4d87-9aa7-64c80eb810cb-kube-api-access-4rftk\") pod \"must-gather-8lvtv\" (UID: \"da586989-4b48-4d87-9aa7-64c80eb810cb\") " pod="openshift-must-gather-nlmfj/must-gather-8lvtv"
Oct 13 14:04:57 crc kubenswrapper[4684]: I1013 14:04:57.920163 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/da586989-4b48-4d87-9aa7-64c80eb810cb-must-gather-output\") pod \"must-gather-8lvtv\" (UID: \"da586989-4b48-4d87-9aa7-64c80eb810cb\") " pod="openshift-must-gather-nlmfj/must-gather-8lvtv"
Oct 13 14:04:58 crc kubenswrapper[4684]: I1013 14:04:58.021783 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rftk\" (UniqueName: \"kubernetes.io/projected/da586989-4b48-4d87-9aa7-64c80eb810cb-kube-api-access-4rftk\") pod \"must-gather-8lvtv\" (UID: \"da586989-4b48-4d87-9aa7-64c80eb810cb\") " pod="openshift-must-gather-nlmfj/must-gather-8lvtv"
Oct 13 14:04:58 crc kubenswrapper[4684]: I1013 14:04:58.021861 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/da586989-4b48-4d87-9aa7-64c80eb810cb-must-gather-output\") pod \"must-gather-8lvtv\" (UID: \"da586989-4b48-4d87-9aa7-64c80eb810cb\") " pod="openshift-must-gather-nlmfj/must-gather-8lvtv"
Oct 13 14:04:58 crc kubenswrapper[4684]: I1013 14:04:58.022420 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/da586989-4b48-4d87-9aa7-64c80eb810cb-must-gather-output\") pod \"must-gather-8lvtv\" (UID: \"da586989-4b48-4d87-9aa7-64c80eb810cb\") " pod="openshift-must-gather-nlmfj/must-gather-8lvtv"
Oct 13 14:04:58 crc kubenswrapper[4684]: I1013 14:04:58.041056 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rftk\" (UniqueName: \"kubernetes.io/projected/da586989-4b48-4d87-9aa7-64c80eb810cb-kube-api-access-4rftk\") pod \"must-gather-8lvtv\" (UID: \"da586989-4b48-4d87-9aa7-64c80eb810cb\") " pod="openshift-must-gather-nlmfj/must-gather-8lvtv"
Oct 13 14:04:58 crc kubenswrapper[4684]: I1013 14:04:58.117359 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlmfj/must-gather-8lvtv"
Oct 13 14:04:58 crc kubenswrapper[4684]: I1013 14:04:58.613045 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-nlmfj/must-gather-8lvtv"]
Oct 13 14:04:59 crc kubenswrapper[4684]: I1013 14:04:59.009785 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlmfj/must-gather-8lvtv" event={"ID":"da586989-4b48-4d87-9aa7-64c80eb810cb","Type":"ContainerStarted","Data":"bbdef0c3552a6d64188acb5dd38b075e49126cc2ec051522be6cc27ef6c79fba"}
Oct 13 14:05:03 crc kubenswrapper[4684]: I1013 14:05:03.050786 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlmfj/must-gather-8lvtv" event={"ID":"da586989-4b48-4d87-9aa7-64c80eb810cb","Type":"ContainerStarted","Data":"21ec255dded6e7df1d0398295d8e4327195faad60de6e1f22e7656d547aa4d82"}
Oct 13 14:05:04 crc kubenswrapper[4684]: I1013 14:05:04.065396 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlmfj/must-gather-8lvtv" event={"ID":"da586989-4b48-4d87-9aa7-64c80eb810cb","Type":"ContainerStarted","Data":"7c658129b513d415745299380e5a6604a322379dc9db056d832905a39f9cd77f"}
Oct 13 14:05:06 crc kubenswrapper[4684]: I1013 14:05:06.501460 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-nlmfj/must-gather-8lvtv" podStartSLOduration=5.521247403 podStartE2EDuration="9.501440754s" podCreationTimestamp="2025-10-13 14:04:57 +0000 UTC" firstStartedPulling="2025-10-13 14:04:58.617979728 +0000 UTC m=+3453.185363808" lastFinishedPulling="2025-10-13 14:05:02.598173089 +0000 UTC m=+3457.165557159" observedRunningTime="2025-10-13 14:05:04.100633905 +0000 UTC m=+3458.668017985" watchObservedRunningTime="2025-10-13 14:05:06.501440754 +0000 UTC m=+3461.068824824"
Oct 13 14:05:06 crc kubenswrapper[4684]: I1013 14:05:06.508592 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nlmfj/crc-debug-2f85g"]
Oct 13 14:05:06 crc kubenswrapper[4684]: I1013 14:05:06.510388 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlmfj/crc-debug-2f85g"
Oct 13 14:05:06 crc kubenswrapper[4684]: I1013 14:05:06.610191 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wm82\" (UniqueName: \"kubernetes.io/projected/cfe4b515-b9df-4971-9705-212c2a24fdb0-kube-api-access-8wm82\") pod \"crc-debug-2f85g\" (UID: \"cfe4b515-b9df-4971-9705-212c2a24fdb0\") " pod="openshift-must-gather-nlmfj/crc-debug-2f85g"
Oct 13 14:05:06 crc kubenswrapper[4684]: I1013 14:05:06.610273 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cfe4b515-b9df-4971-9705-212c2a24fdb0-host\") pod \"crc-debug-2f85g\" (UID: \"cfe4b515-b9df-4971-9705-212c2a24fdb0\") " pod="openshift-must-gather-nlmfj/crc-debug-2f85g"
Oct 13 14:05:06 crc kubenswrapper[4684]: I1013 14:05:06.712312 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cfe4b515-b9df-4971-9705-212c2a24fdb0-host\") pod \"crc-debug-2f85g\" (UID: \"cfe4b515-b9df-4971-9705-212c2a24fdb0\") " pod="openshift-must-gather-nlmfj/crc-debug-2f85g"
Oct 13 14:05:06 crc kubenswrapper[4684]: I1013 14:05:06.712426 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cfe4b515-b9df-4971-9705-212c2a24fdb0-host\") pod \"crc-debug-2f85g\" (UID: \"cfe4b515-b9df-4971-9705-212c2a24fdb0\") " pod="openshift-must-gather-nlmfj/crc-debug-2f85g"
Oct 13 14:05:06 crc kubenswrapper[4684]: I1013 14:05:06.712603 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wm82\" (UniqueName: \"kubernetes.io/projected/cfe4b515-b9df-4971-9705-212c2a24fdb0-kube-api-access-8wm82\") pod \"crc-debug-2f85g\" (UID: \"cfe4b515-b9df-4971-9705-212c2a24fdb0\") " pod="openshift-must-gather-nlmfj/crc-debug-2f85g"
Oct 13 14:05:06 crc kubenswrapper[4684]: I1013 14:05:06.737770 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wm82\" (UniqueName: \"kubernetes.io/projected/cfe4b515-b9df-4971-9705-212c2a24fdb0-kube-api-access-8wm82\") pod \"crc-debug-2f85g\" (UID: \"cfe4b515-b9df-4971-9705-212c2a24fdb0\") " pod="openshift-must-gather-nlmfj/crc-debug-2f85g"
Oct 13 14:05:06 crc kubenswrapper[4684]: I1013 14:05:06.840707 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlmfj/crc-debug-2f85g"
Oct 13 14:05:06 crc kubenswrapper[4684]: W1013 14:05:06.899887 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcfe4b515_b9df_4971_9705_212c2a24fdb0.slice/crio-8d99c823ef3166890131c04189ab2ffd47a21a4e9aa171ba3fef5bd5a64e6a21 WatchSource:0}: Error finding container 8d99c823ef3166890131c04189ab2ffd47a21a4e9aa171ba3fef5bd5a64e6a21: Status 404 returned error can't find the container with id 8d99c823ef3166890131c04189ab2ffd47a21a4e9aa171ba3fef5bd5a64e6a21
Oct 13 14:05:07 crc kubenswrapper[4684]: I1013 14:05:07.095398 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlmfj/crc-debug-2f85g" event={"ID":"cfe4b515-b9df-4971-9705-212c2a24fdb0","Type":"ContainerStarted","Data":"8d99c823ef3166890131c04189ab2ffd47a21a4e9aa171ba3fef5bd5a64e6a21"}
Oct 13 14:05:07 crc kubenswrapper[4684]: I1013 14:05:07.379615 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ztv9t"]
Oct 13 14:05:07 crc kubenswrapper[4684]: I1013 14:05:07.382263 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:07 crc kubenswrapper[4684]: I1013 14:05:07.391094 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ztv9t"]
Oct 13 14:05:07 crc kubenswrapper[4684]: I1013 14:05:07.533548 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2b9b\" (UniqueName: \"kubernetes.io/projected/944027a3-2a5c-4b67-abda-e86894722e1b-kube-api-access-j2b9b\") pod \"redhat-marketplace-ztv9t\" (UID: \"944027a3-2a5c-4b67-abda-e86894722e1b\") " pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:07 crc kubenswrapper[4684]: I1013 14:05:07.533685 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/944027a3-2a5c-4b67-abda-e86894722e1b-catalog-content\") pod \"redhat-marketplace-ztv9t\" (UID: \"944027a3-2a5c-4b67-abda-e86894722e1b\") " pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:07 crc kubenswrapper[4684]: I1013 14:05:07.533757 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/944027a3-2a5c-4b67-abda-e86894722e1b-utilities\") pod \"redhat-marketplace-ztv9t\" (UID: \"944027a3-2a5c-4b67-abda-e86894722e1b\") " pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:07 crc kubenswrapper[4684]: I1013 14:05:07.635602 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/944027a3-2a5c-4b67-abda-e86894722e1b-catalog-content\") pod \"redhat-marketplace-ztv9t\" (UID: \"944027a3-2a5c-4b67-abda-e86894722e1b\") " pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:07 crc kubenswrapper[4684]: I1013 14:05:07.636001 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/944027a3-2a5c-4b67-abda-e86894722e1b-utilities\") pod \"redhat-marketplace-ztv9t\" (UID: \"944027a3-2a5c-4b67-abda-e86894722e1b\") " pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:07 crc kubenswrapper[4684]: I1013 14:05:07.636265 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2b9b\" (UniqueName: \"kubernetes.io/projected/944027a3-2a5c-4b67-abda-e86894722e1b-kube-api-access-j2b9b\") pod \"redhat-marketplace-ztv9t\" (UID: \"944027a3-2a5c-4b67-abda-e86894722e1b\") " pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:07 crc kubenswrapper[4684]: I1013 14:05:07.636532 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/944027a3-2a5c-4b67-abda-e86894722e1b-catalog-content\") pod \"redhat-marketplace-ztv9t\" (UID: \"944027a3-2a5c-4b67-abda-e86894722e1b\") " pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:07 crc kubenswrapper[4684]: I1013 14:05:07.636582 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/944027a3-2a5c-4b67-abda-e86894722e1b-utilities\") pod \"redhat-marketplace-ztv9t\" (UID: \"944027a3-2a5c-4b67-abda-e86894722e1b\") " pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:07 crc kubenswrapper[4684]: I1013 14:05:07.659513 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2b9b\" (UniqueName: \"kubernetes.io/projected/944027a3-2a5c-4b67-abda-e86894722e1b-kube-api-access-j2b9b\") pod \"redhat-marketplace-ztv9t\" (UID: \"944027a3-2a5c-4b67-abda-e86894722e1b\") " pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:07 crc kubenswrapper[4684]: I1013 14:05:07.730877 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:08 crc kubenswrapper[4684]: I1013 14:05:08.446661 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ztv9t"]
Oct 13 14:05:09 crc kubenswrapper[4684]: I1013 14:05:09.129698 4684 generic.go:334] "Generic (PLEG): container finished" podID="944027a3-2a5c-4b67-abda-e86894722e1b" containerID="f1f3869b76086c3a52bc789984f9d2d3ac463cbbec4cf3da5b1e94033210441b" exitCode=0
Oct 13 14:05:09 crc kubenswrapper[4684]: I1013 14:05:09.129848 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztv9t" event={"ID":"944027a3-2a5c-4b67-abda-e86894722e1b","Type":"ContainerDied","Data":"f1f3869b76086c3a52bc789984f9d2d3ac463cbbec4cf3da5b1e94033210441b"}
Oct 13 14:05:09 crc kubenswrapper[4684]: I1013 14:05:09.130146 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztv9t" event={"ID":"944027a3-2a5c-4b67-abda-e86894722e1b","Type":"ContainerStarted","Data":"3f4d408ffce48bd1444596cb4d6288b1f416b58e480640277e7271911f2dcfc3"}
Oct 13 14:05:11 crc kubenswrapper[4684]: I1013 14:05:11.159873 4684 generic.go:334] "Generic (PLEG): container finished" podID="944027a3-2a5c-4b67-abda-e86894722e1b" containerID="97e98b52858254168af2be8a9bddcecce1833ba24f0ead0d75c329e2c24eb1be" exitCode=0
Oct 13 14:05:11 crc kubenswrapper[4684]: I1013 14:05:11.159991 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztv9t" event={"ID":"944027a3-2a5c-4b67-abda-e86894722e1b","Type":"ContainerDied","Data":"97e98b52858254168af2be8a9bddcecce1833ba24f0ead0d75c329e2c24eb1be"}
Oct 13 14:05:12 crc kubenswrapper[4684]: I1013 14:05:12.178103 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztv9t" event={"ID":"944027a3-2a5c-4b67-abda-e86894722e1b","Type":"ContainerStarted","Data":"b3104b9a6e3fcdaf6dbb824dfadb2d4b8b7c9b483e188b2a9d65996efd17b08a"}
Oct 13 14:05:12 crc kubenswrapper[4684]: I1013 14:05:12.208259 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ztv9t" podStartSLOduration=2.759040592 podStartE2EDuration="5.208236341s" podCreationTimestamp="2025-10-13 14:05:07 +0000 UTC" firstStartedPulling="2025-10-13 14:05:09.135649414 +0000 UTC m=+3463.703033484" lastFinishedPulling="2025-10-13 14:05:11.584845163 +0000 UTC m=+3466.152229233" observedRunningTime="2025-10-13 14:05:12.204423922 +0000 UTC m=+3466.771807992" watchObservedRunningTime="2025-10-13 14:05:12.208236341 +0000 UTC m=+3466.775620411"
Oct 13 14:05:17 crc kubenswrapper[4684]: I1013 14:05:17.732091 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:17 crc kubenswrapper[4684]: I1013 14:05:17.732797 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:17 crc kubenswrapper[4684]: I1013 14:05:17.785132 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:18 crc kubenswrapper[4684]: I1013 14:05:18.321077 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:18 crc kubenswrapper[4684]: I1013 14:05:18.371415 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ztv9t"]
Oct 13 14:05:20 crc kubenswrapper[4684]: I1013 14:05:20.259732 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ztv9t" podUID="944027a3-2a5c-4b67-abda-e86894722e1b" containerName="registry-server" containerID="cri-o://b3104b9a6e3fcdaf6dbb824dfadb2d4b8b7c9b483e188b2a9d65996efd17b08a" gracePeriod=2
Oct 13 14:05:20 crc kubenswrapper[4684]: I1013 14:05:20.790770 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:20 crc kubenswrapper[4684]: I1013 14:05:20.914121 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/944027a3-2a5c-4b67-abda-e86894722e1b-catalog-content\") pod \"944027a3-2a5c-4b67-abda-e86894722e1b\" (UID: \"944027a3-2a5c-4b67-abda-e86894722e1b\") "
Oct 13 14:05:20 crc kubenswrapper[4684]: I1013 14:05:20.914407 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2b9b\" (UniqueName: \"kubernetes.io/projected/944027a3-2a5c-4b67-abda-e86894722e1b-kube-api-access-j2b9b\") pod \"944027a3-2a5c-4b67-abda-e86894722e1b\" (UID: \"944027a3-2a5c-4b67-abda-e86894722e1b\") "
Oct 13 14:05:20 crc kubenswrapper[4684]: I1013 14:05:20.914437 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/944027a3-2a5c-4b67-abda-e86894722e1b-utilities\") pod \"944027a3-2a5c-4b67-abda-e86894722e1b\" (UID: \"944027a3-2a5c-4b67-abda-e86894722e1b\") "
Oct 13 14:05:20 crc kubenswrapper[4684]: I1013 14:05:20.914880 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/944027a3-2a5c-4b67-abda-e86894722e1b-utilities" (OuterVolumeSpecName: "utilities") pod "944027a3-2a5c-4b67-abda-e86894722e1b" (UID: "944027a3-2a5c-4b67-abda-e86894722e1b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 14:05:20 crc kubenswrapper[4684]: I1013 14:05:20.919358 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/944027a3-2a5c-4b67-abda-e86894722e1b-kube-api-access-j2b9b" (OuterVolumeSpecName: "kube-api-access-j2b9b") pod "944027a3-2a5c-4b67-abda-e86894722e1b" (UID: "944027a3-2a5c-4b67-abda-e86894722e1b"). InnerVolumeSpecName "kube-api-access-j2b9b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 14:05:20 crc kubenswrapper[4684]: I1013 14:05:20.927064 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/944027a3-2a5c-4b67-abda-e86894722e1b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "944027a3-2a5c-4b67-abda-e86894722e1b" (UID: "944027a3-2a5c-4b67-abda-e86894722e1b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.016863 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/944027a3-2a5c-4b67-abda-e86894722e1b-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.016914 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2b9b\" (UniqueName: \"kubernetes.io/projected/944027a3-2a5c-4b67-abda-e86894722e1b-kube-api-access-j2b9b\") on node \"crc\" DevicePath \"\""
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.016932 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/944027a3-2a5c-4b67-abda-e86894722e1b-utilities\") on node \"crc\" DevicePath \"\""
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.277593 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlmfj/crc-debug-2f85g" event={"ID":"cfe4b515-b9df-4971-9705-212c2a24fdb0","Type":"ContainerStarted","Data":"df464cb4f188283ac25a7a9f200e6a4c8fc16bb0d7af281731e3e1ec177beece"}
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.281809 4684 generic.go:334] "Generic (PLEG): container finished" podID="944027a3-2a5c-4b67-abda-e86894722e1b" containerID="b3104b9a6e3fcdaf6dbb824dfadb2d4b8b7c9b483e188b2a9d65996efd17b08a" exitCode=0
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.281861 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztv9t" event={"ID":"944027a3-2a5c-4b67-abda-e86894722e1b","Type":"ContainerDied","Data":"b3104b9a6e3fcdaf6dbb824dfadb2d4b8b7c9b483e188b2a9d65996efd17b08a"}
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.281889 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ztv9t" event={"ID":"944027a3-2a5c-4b67-abda-e86894722e1b","Type":"ContainerDied","Data":"3f4d408ffce48bd1444596cb4d6288b1f416b58e480640277e7271911f2dcfc3"}
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.281934 4684 scope.go:117] "RemoveContainer" containerID="b3104b9a6e3fcdaf6dbb824dfadb2d4b8b7c9b483e188b2a9d65996efd17b08a"
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.282109 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ztv9t"
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.299954 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-nlmfj/crc-debug-2f85g" podStartSLOduration=1.737637197 podStartE2EDuration="15.299928983s" podCreationTimestamp="2025-10-13 14:05:06 +0000 UTC" firstStartedPulling="2025-10-13 14:05:06.902432067 +0000 UTC m=+3461.469816137" lastFinishedPulling="2025-10-13 14:05:20.464723852 +0000 UTC m=+3475.032107923" observedRunningTime="2025-10-13 14:05:21.295943137 +0000 UTC m=+3475.863327217" watchObservedRunningTime="2025-10-13 14:05:21.299928983 +0000 UTC m=+3475.867313053"
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.333323 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ztv9t"]
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.334853 4684 scope.go:117] "RemoveContainer" containerID="97e98b52858254168af2be8a9bddcecce1833ba24f0ead0d75c329e2c24eb1be"
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.346557 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ztv9t"]
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.368840 4684 scope.go:117] "RemoveContainer" containerID="f1f3869b76086c3a52bc789984f9d2d3ac463cbbec4cf3da5b1e94033210441b"
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.397483 4684 scope.go:117] "RemoveContainer" containerID="b3104b9a6e3fcdaf6dbb824dfadb2d4b8b7c9b483e188b2a9d65996efd17b08a"
Oct 13 14:05:21 crc kubenswrapper[4684]: E1013 14:05:21.398344 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3104b9a6e3fcdaf6dbb824dfadb2d4b8b7c9b483e188b2a9d65996efd17b08a\": container with ID starting with b3104b9a6e3fcdaf6dbb824dfadb2d4b8b7c9b483e188b2a9d65996efd17b08a not found: ID does not exist" containerID="b3104b9a6e3fcdaf6dbb824dfadb2d4b8b7c9b483e188b2a9d65996efd17b08a"
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.398397 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3104b9a6e3fcdaf6dbb824dfadb2d4b8b7c9b483e188b2a9d65996efd17b08a"} err="failed to get container status \"b3104b9a6e3fcdaf6dbb824dfadb2d4b8b7c9b483e188b2a9d65996efd17b08a\": rpc error: code = NotFound desc = could not find container \"b3104b9a6e3fcdaf6dbb824dfadb2d4b8b7c9b483e188b2a9d65996efd17b08a\": container with ID starting with b3104b9a6e3fcdaf6dbb824dfadb2d4b8b7c9b483e188b2a9d65996efd17b08a not found: ID does not exist"
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.398426 4684 scope.go:117] "RemoveContainer" containerID="97e98b52858254168af2be8a9bddcecce1833ba24f0ead0d75c329e2c24eb1be"
Oct 13 14:05:21 crc kubenswrapper[4684]: E1013 14:05:21.398870 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97e98b52858254168af2be8a9bddcecce1833ba24f0ead0d75c329e2c24eb1be\": container with ID starting with 97e98b52858254168af2be8a9bddcecce1833ba24f0ead0d75c329e2c24eb1be not found: ID does not exist" containerID="97e98b52858254168af2be8a9bddcecce1833ba24f0ead0d75c329e2c24eb1be"
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.398920 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97e98b52858254168af2be8a9bddcecce1833ba24f0ead0d75c329e2c24eb1be"} err="failed to get container status \"97e98b52858254168af2be8a9bddcecce1833ba24f0ead0d75c329e2c24eb1be\": rpc error: code = NotFound desc = could not find container \"97e98b52858254168af2be8a9bddcecce1833ba24f0ead0d75c329e2c24eb1be\": container with ID starting with 97e98b52858254168af2be8a9bddcecce1833ba24f0ead0d75c329e2c24eb1be not found: ID does not exist"
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.398943 4684 scope.go:117] "RemoveContainer" containerID="f1f3869b76086c3a52bc789984f9d2d3ac463cbbec4cf3da5b1e94033210441b"
Oct 13 14:05:21 crc kubenswrapper[4684]: E1013 14:05:21.399569 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1f3869b76086c3a52bc789984f9d2d3ac463cbbec4cf3da5b1e94033210441b\": container with ID starting with f1f3869b76086c3a52bc789984f9d2d3ac463cbbec4cf3da5b1e94033210441b not found: ID does not exist" containerID="f1f3869b76086c3a52bc789984f9d2d3ac463cbbec4cf3da5b1e94033210441b"
Oct 13 14:05:21 crc kubenswrapper[4684]: I1013 14:05:21.399616 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1f3869b76086c3a52bc789984f9d2d3ac463cbbec4cf3da5b1e94033210441b"} err="failed to get container status \"f1f3869b76086c3a52bc789984f9d2d3ac463cbbec4cf3da5b1e94033210441b\": rpc error: code = NotFound desc = could not find container \"f1f3869b76086c3a52bc789984f9d2d3ac463cbbec4cf3da5b1e94033210441b\": container with ID starting with f1f3869b76086c3a52bc789984f9d2d3ac463cbbec4cf3da5b1e94033210441b not found: ID does not exist"
Oct 13 14:05:22 crc kubenswrapper[4684]: I1013 14:05:22.361502 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="944027a3-2a5c-4b67-abda-e86894722e1b" path="/var/lib/kubelet/pods/944027a3-2a5c-4b67-abda-e86894722e1b/volumes"
Oct 13 14:05:57 crc kubenswrapper[4684]: I1013 14:05:57.667636 4684 generic.go:334] "Generic (PLEG): container finished" podID="cfe4b515-b9df-4971-9705-212c2a24fdb0" containerID="df464cb4f188283ac25a7a9f200e6a4c8fc16bb0d7af281731e3e1ec177beece" exitCode=0
Oct 13 14:05:57 crc kubenswrapper[4684]: I1013 14:05:57.667685 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlmfj/crc-debug-2f85g" event={"ID":"cfe4b515-b9df-4971-9705-212c2a24fdb0","Type":"ContainerDied","Data":"df464cb4f188283ac25a7a9f200e6a4c8fc16bb0d7af281731e3e1ec177beece"}
Oct 13 14:05:58 crc kubenswrapper[4684]: I1013 14:05:58.768781 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlmfj/crc-debug-2f85g"
Oct 13 14:05:58 crc kubenswrapper[4684]: I1013 14:05:58.799651 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nlmfj/crc-debug-2f85g"]
Oct 13 14:05:58 crc kubenswrapper[4684]: I1013 14:05:58.808586 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nlmfj/crc-debug-2f85g"]
Oct 13 14:05:58 crc kubenswrapper[4684]: I1013 14:05:58.890757 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cfe4b515-b9df-4971-9705-212c2a24fdb0-host\") pod \"cfe4b515-b9df-4971-9705-212c2a24fdb0\" (UID: \"cfe4b515-b9df-4971-9705-212c2a24fdb0\") "
Oct 13 14:05:58 crc kubenswrapper[4684]: I1013 14:05:58.891077 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wm82\" (UniqueName: \"kubernetes.io/projected/cfe4b515-b9df-4971-9705-212c2a24fdb0-kube-api-access-8wm82\") pod \"cfe4b515-b9df-4971-9705-212c2a24fdb0\" (UID: \"cfe4b515-b9df-4971-9705-212c2a24fdb0\") "
Oct 13 14:05:58 crc kubenswrapper[4684]: I1013 14:05:58.890925 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe4b515-b9df-4971-9705-212c2a24fdb0-host" (OuterVolumeSpecName: "host") pod "cfe4b515-b9df-4971-9705-212c2a24fdb0" (UID: "cfe4b515-b9df-4971-9705-212c2a24fdb0"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 13 14:05:58 crc kubenswrapper[4684]: I1013 14:05:58.891491 4684 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cfe4b515-b9df-4971-9705-212c2a24fdb0-host\") on node \"crc\" DevicePath \"\""
Oct 13 14:05:58 crc kubenswrapper[4684]: I1013 14:05:58.897302 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfe4b515-b9df-4971-9705-212c2a24fdb0-kube-api-access-8wm82" (OuterVolumeSpecName: "kube-api-access-8wm82") pod "cfe4b515-b9df-4971-9705-212c2a24fdb0" (UID: "cfe4b515-b9df-4971-9705-212c2a24fdb0"). InnerVolumeSpecName "kube-api-access-8wm82". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 14:05:58 crc kubenswrapper[4684]: I1013 14:05:58.993013 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wm82\" (UniqueName: \"kubernetes.io/projected/cfe4b515-b9df-4971-9705-212c2a24fdb0-kube-api-access-8wm82\") on node \"crc\" DevicePath \"\""
Oct 13 14:05:59 crc kubenswrapper[4684]: I1013 14:05:59.688061 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d99c823ef3166890131c04189ab2ffd47a21a4e9aa171ba3fef5bd5a64e6a21"
Oct 13 14:05:59 crc kubenswrapper[4684]: I1013 14:05:59.688362 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlmfj/crc-debug-2f85g"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.010050 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nlmfj/crc-debug-vxhkv"]
Oct 13 14:06:00 crc kubenswrapper[4684]: E1013 14:06:00.010514 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="944027a3-2a5c-4b67-abda-e86894722e1b" containerName="extract-utilities"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.010529 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="944027a3-2a5c-4b67-abda-e86894722e1b" containerName="extract-utilities"
Oct 13 14:06:00 crc kubenswrapper[4684]: E1013 14:06:00.010539 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="944027a3-2a5c-4b67-abda-e86894722e1b" containerName="registry-server"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.010545 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="944027a3-2a5c-4b67-abda-e86894722e1b" containerName="registry-server"
Oct 13 14:06:00 crc kubenswrapper[4684]: E1013 14:06:00.010572 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe4b515-b9df-4971-9705-212c2a24fdb0" containerName="container-00"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.010578 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe4b515-b9df-4971-9705-212c2a24fdb0" containerName="container-00"
Oct 13 14:06:00 crc kubenswrapper[4684]: E1013 14:06:00.010590 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="944027a3-2a5c-4b67-abda-e86894722e1b" containerName="extract-content"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.010597 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="944027a3-2a5c-4b67-abda-e86894722e1b" containerName="extract-content"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.010784 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="944027a3-2a5c-4b67-abda-e86894722e1b" containerName="registry-server"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.010804 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfe4b515-b9df-4971-9705-212c2a24fdb0" containerName="container-00"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.011475 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlmfj/crc-debug-vxhkv"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.112590 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f3952f92-be0f-40b9-9bad-35765a410933-host\") pod \"crc-debug-vxhkv\" (UID: \"f3952f92-be0f-40b9-9bad-35765a410933\") " pod="openshift-must-gather-nlmfj/crc-debug-vxhkv"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.112917 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fdsd\" (UniqueName: \"kubernetes.io/projected/f3952f92-be0f-40b9-9bad-35765a410933-kube-api-access-6fdsd\") pod \"crc-debug-vxhkv\" (UID: \"f3952f92-be0f-40b9-9bad-35765a410933\") " pod="openshift-must-gather-nlmfj/crc-debug-vxhkv"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.215359 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f3952f92-be0f-40b9-9bad-35765a410933-host\") pod \"crc-debug-vxhkv\" (UID: \"f3952f92-be0f-40b9-9bad-35765a410933\") " pod="openshift-must-gather-nlmfj/crc-debug-vxhkv"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.215416 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fdsd\" (UniqueName: \"kubernetes.io/projected/f3952f92-be0f-40b9-9bad-35765a410933-kube-api-access-6fdsd\") pod \"crc-debug-vxhkv\" (UID: \"f3952f92-be0f-40b9-9bad-35765a410933\") " pod="openshift-must-gather-nlmfj/crc-debug-vxhkv"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.215830 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f3952f92-be0f-40b9-9bad-35765a410933-host\") pod \"crc-debug-vxhkv\" (UID: \"f3952f92-be0f-40b9-9bad-35765a410933\") " pod="openshift-must-gather-nlmfj/crc-debug-vxhkv"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.248752 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fdsd\" (UniqueName: \"kubernetes.io/projected/f3952f92-be0f-40b9-9bad-35765a410933-kube-api-access-6fdsd\") pod \"crc-debug-vxhkv\" (UID: \"f3952f92-be0f-40b9-9bad-35765a410933\") " pod="openshift-must-gather-nlmfj/crc-debug-vxhkv"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.330670 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlmfj/crc-debug-vxhkv"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.364522 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfe4b515-b9df-4971-9705-212c2a24fdb0" path="/var/lib/kubelet/pods/cfe4b515-b9df-4971-9705-212c2a24fdb0/volumes"
Oct 13 14:06:00 crc kubenswrapper[4684]: W1013 14:06:00.368278 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf3952f92_be0f_40b9_9bad_35765a410933.slice/crio-d6435e6156a90731b0d84c55e7cd8ce177eb9cc982527ab9ade0530e8afd7410 WatchSource:0}: Error finding container d6435e6156a90731b0d84c55e7cd8ce177eb9cc982527ab9ade0530e8afd7410: Status 404 returned error can't find the container with id d6435e6156a90731b0d84c55e7cd8ce177eb9cc982527ab9ade0530e8afd7410
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.562664 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.563097 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.700068 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlmfj/crc-debug-vxhkv" event={"ID":"f3952f92-be0f-40b9-9bad-35765a410933","Type":"ContainerStarted","Data":"7179e3162b8efa0bec7e33d8143ae52f7a225d873e28cb928a3505f0a9bb8fbe"}
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.700113 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlmfj/crc-debug-vxhkv" event={"ID":"f3952f92-be0f-40b9-9bad-35765a410933","Type":"ContainerStarted","Data":"d6435e6156a90731b0d84c55e7cd8ce177eb9cc982527ab9ade0530e8afd7410"}
Oct 13 14:06:00 crc kubenswrapper[4684]: I1013 14:06:00.722651 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-nlmfj/crc-debug-vxhkv" podStartSLOduration=1.722628352 podStartE2EDuration="1.722628352s" podCreationTimestamp="2025-10-13 14:05:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 14:06:00.72193977 +0000 UTC m=+3515.289323890" watchObservedRunningTime="2025-10-13 14:06:00.722628352 +0000 UTC m=+3515.290012442"
Oct 13 14:06:01 crc kubenswrapper[4684]: I1013 14:06:01.713581 4684 generic.go:334] "Generic (PLEG): container finished" podID="f3952f92-be0f-40b9-9bad-35765a410933" containerID="7179e3162b8efa0bec7e33d8143ae52f7a225d873e28cb928a3505f0a9bb8fbe" exitCode=0
Oct 13 14:06:01 crc kubenswrapper[4684]: I1013 14:06:01.713665 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlmfj/crc-debug-vxhkv" event={"ID":"f3952f92-be0f-40b9-9bad-35765a410933","Type":"ContainerDied","Data":"7179e3162b8efa0bec7e33d8143ae52f7a225d873e28cb928a3505f0a9bb8fbe"}
Oct 13 14:06:02 crc kubenswrapper[4684]: I1013 14:06:02.840423 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlmfj/crc-debug-vxhkv"
Oct 13 14:06:02 crc kubenswrapper[4684]: I1013 14:06:02.874025 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nlmfj/crc-debug-vxhkv"]
Oct 13 14:06:02 crc kubenswrapper[4684]: I1013 14:06:02.883381 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nlmfj/crc-debug-vxhkv"]
Oct 13 14:06:03 crc kubenswrapper[4684]: I1013 14:06:03.002835 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f3952f92-be0f-40b9-9bad-35765a410933-host\") pod \"f3952f92-be0f-40b9-9bad-35765a410933\" (UID: \"f3952f92-be0f-40b9-9bad-35765a410933\") "
Oct 13 14:06:03 crc kubenswrapper[4684]: I1013 14:06:03.002943 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f3952f92-be0f-40b9-9bad-35765a410933-host" (OuterVolumeSpecName: "host") pod "f3952f92-be0f-40b9-9bad-35765a410933" (UID: "f3952f92-be0f-40b9-9bad-35765a410933"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 13 14:06:03 crc kubenswrapper[4684]: I1013 14:06:03.003012 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fdsd\" (UniqueName: \"kubernetes.io/projected/f3952f92-be0f-40b9-9bad-35765a410933-kube-api-access-6fdsd\") pod \"f3952f92-be0f-40b9-9bad-35765a410933\" (UID: \"f3952f92-be0f-40b9-9bad-35765a410933\") "
Oct 13 14:06:03 crc kubenswrapper[4684]: I1013 14:06:03.003639 4684 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f3952f92-be0f-40b9-9bad-35765a410933-host\") on node \"crc\" DevicePath \"\""
Oct 13 14:06:03 crc kubenswrapper[4684]: I1013 14:06:03.017694 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3952f92-be0f-40b9-9bad-35765a410933-kube-api-access-6fdsd" (OuterVolumeSpecName: "kube-api-access-6fdsd") pod "f3952f92-be0f-40b9-9bad-35765a410933" (UID: "f3952f92-be0f-40b9-9bad-35765a410933"). InnerVolumeSpecName "kube-api-access-6fdsd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 14:06:03 crc kubenswrapper[4684]: I1013 14:06:03.105983 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fdsd\" (UniqueName: \"kubernetes.io/projected/f3952f92-be0f-40b9-9bad-35765a410933-kube-api-access-6fdsd\") on node \"crc\" DevicePath \"\""
Oct 13 14:06:03 crc kubenswrapper[4684]: I1013 14:06:03.734992 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6435e6156a90731b0d84c55e7cd8ce177eb9cc982527ab9ade0530e8afd7410"
Oct 13 14:06:03 crc kubenswrapper[4684]: I1013 14:06:03.735058 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlmfj/crc-debug-vxhkv"
Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.145187 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nlmfj/crc-debug-nnxpg"]
Oct 13 14:06:04 crc kubenswrapper[4684]: E1013 14:06:04.145599 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3952f92-be0f-40b9-9bad-35765a410933" containerName="container-00"
Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.145611 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3952f92-be0f-40b9-9bad-35765a410933" containerName="container-00"
Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.145786 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3952f92-be0f-40b9-9bad-35765a410933" containerName="container-00"
Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.146369 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlmfj/crc-debug-nnxpg"
Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.226522 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlkbh\" (UniqueName: \"kubernetes.io/projected/252bb6df-1364-4a32-a606-85831bef3e2e-kube-api-access-dlkbh\") pod \"crc-debug-nnxpg\" (UID: \"252bb6df-1364-4a32-a606-85831bef3e2e\") " pod="openshift-must-gather-nlmfj/crc-debug-nnxpg"
Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.226704 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/252bb6df-1364-4a32-a606-85831bef3e2e-host\") pod \"crc-debug-nnxpg\" (UID: \"252bb6df-1364-4a32-a606-85831bef3e2e\") " pod="openshift-must-gather-nlmfj/crc-debug-nnxpg"
Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.328293 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/252bb6df-1364-4a32-a606-85831bef3e2e-host\") pod \"crc-debug-nnxpg\" (UID: \"252bb6df-1364-4a32-a606-85831bef3e2e\") " pod="openshift-must-gather-nlmfj/crc-debug-nnxpg"
Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.328463 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/252bb6df-1364-4a32-a606-85831bef3e2e-host\") pod \"crc-debug-nnxpg\" (UID: \"252bb6df-1364-4a32-a606-85831bef3e2e\") " pod="openshift-must-gather-nlmfj/crc-debug-nnxpg"
Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.328495 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlkbh\" (UniqueName: \"kubernetes.io/projected/252bb6df-1364-4a32-a606-85831bef3e2e-kube-api-access-dlkbh\") pod \"crc-debug-nnxpg\" (UID: \"252bb6df-1364-4a32-a606-85831bef3e2e\") " pod="openshift-must-gather-nlmfj/crc-debug-nnxpg"
Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.343262 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6c8dbf9d98-ml255_cafbe5b0-5ce7-4f2b-ac20-4f95592dc662/barbican-api/0.log"
Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.346703 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlkbh\" (UniqueName: \"kubernetes.io/projected/252bb6df-1364-4a32-a606-85831bef3e2e-kube-api-access-dlkbh\") pod \"crc-debug-nnxpg\" (UID: \"252bb6df-1364-4a32-a606-85831bef3e2e\") " pod="openshift-must-gather-nlmfj/crc-debug-nnxpg"
Oct 13 14:06:04 crc
kubenswrapper[4684]: I1013 14:06:04.360842 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3952f92-be0f-40b9-9bad-35765a410933" path="/var/lib/kubelet/pods/f3952f92-be0f-40b9-9bad-35765a410933/volumes" Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.370926 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6c8dbf9d98-ml255_cafbe5b0-5ce7-4f2b-ac20-4f95592dc662/barbican-api-log/0.log" Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.476381 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlmfj/crc-debug-nnxpg" Oct 13 14:06:04 crc kubenswrapper[4684]: W1013 14:06:04.517055 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod252bb6df_1364_4a32_a606_85831bef3e2e.slice/crio-73abc9b08d89babf41fcb4a2a4f704a9cec04c76723583476189623f2b09cbcb WatchSource:0}: Error finding container 73abc9b08d89babf41fcb4a2a4f704a9cec04c76723583476189623f2b09cbcb: Status 404 returned error can't find the container with id 73abc9b08d89babf41fcb4a2a4f704a9cec04c76723583476189623f2b09cbcb Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.547367 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-746497c59d-h7fpp_b3b2549e-a0de-4650-95fb-c3b8c8998664/barbican-keystone-listener/0.log" Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.631057 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-746497c59d-h7fpp_b3b2549e-a0de-4650-95fb-c3b8c8998664/barbican-keystone-listener-log/0.log" Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.746240 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlmfj/crc-debug-nnxpg" event={"ID":"252bb6df-1364-4a32-a606-85831bef3e2e","Type":"ContainerStarted","Data":"73abc9b08d89babf41fcb4a2a4f704a9cec04c76723583476189623f2b09cbcb"} Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.779565 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6c9b968d8f-c6bhw_290ff522-789b-4ba3-90d4-2047bf14a6de/barbican-worker/0.log" Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.842979 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6c9b968d8f-c6bhw_290ff522-789b-4ba3-90d4-2047bf14a6de/barbican-worker-log/0.log" Oct 13 14:06:04 crc kubenswrapper[4684]: I1013 14:06:04.996353 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm_4bf002ba-ce9a-40ad-a860-0572fc61d996/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:05 crc kubenswrapper[4684]: I1013 14:06:05.217450 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6/ceilometer-central-agent/0.log" Oct 13 14:06:05 crc kubenswrapper[4684]: I1013 14:06:05.217573 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6/ceilometer-notification-agent/0.log" Oct 13 14:06:05 crc kubenswrapper[4684]: I1013 14:06:05.252658 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6/proxy-httpd/0.log" Oct 13 14:06:05 crc kubenswrapper[4684]: I1013 14:06:05.382802 4684 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6/sg-core/0.log" Oct 13 14:06:05 crc kubenswrapper[4684]: I1013 14:06:05.512870 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e/cinder-api/0.log" Oct 13 14:06:05 crc kubenswrapper[4684]: I1013 14:06:05.600710 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e/cinder-api-log/0.log" Oct 13 14:06:05 crc kubenswrapper[4684]: I1013 14:06:05.713316 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_17d5a5ba-88b1-4769-a5fe-5f950804f332/cinder-scheduler/0.log" Oct 13 14:06:05 crc kubenswrapper[4684]: I1013 14:06:05.756231 4684 generic.go:334] "Generic (PLEG): container finished" podID="252bb6df-1364-4a32-a606-85831bef3e2e" containerID="fbadccd583c74850f6f565e06ca5d6f5de14e41fe29bb52d06fb0f6d6e3193ba" exitCode=0 Oct 13 14:06:05 crc kubenswrapper[4684]: I1013 14:06:05.756291 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlmfj/crc-debug-nnxpg" event={"ID":"252bb6df-1364-4a32-a606-85831bef3e2e","Type":"ContainerDied","Data":"fbadccd583c74850f6f565e06ca5d6f5de14e41fe29bb52d06fb0f6d6e3193ba"} Oct 13 14:06:05 crc kubenswrapper[4684]: I1013 14:06:05.798593 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_17d5a5ba-88b1-4769-a5fe-5f950804f332/probe/0.log" Oct 13 14:06:05 crc kubenswrapper[4684]: I1013 14:06:05.799848 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nlmfj/crc-debug-nnxpg"] Oct 13 14:06:05 crc kubenswrapper[4684]: I1013 14:06:05.808269 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nlmfj/crc-debug-nnxpg"] Oct 13 14:06:05 crc kubenswrapper[4684]: I1013 14:06:05.958057 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-c67wg_58124363-4632-4dec-894c-b3c3c289a6f0/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:06 crc kubenswrapper[4684]: I1013 14:06:06.044626 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f_0b60be99-cf7b-40e7-8c3b-539d082dd005/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:06 crc kubenswrapper[4684]: I1013 14:06:06.179748 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg_29952780-72b6-4f29-9d43-06e33d6dd41a/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:06 crc kubenswrapper[4684]: I1013 14:06:06.236652 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67567868d9-g9dwp_c04a3f8c-5d8c-4c33-8964-e31de4003949/init/0.log" Oct 13 14:06:06 crc kubenswrapper[4684]: I1013 14:06:06.443152 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67567868d9-g9dwp_c04a3f8c-5d8c-4c33-8964-e31de4003949/dnsmasq-dns/0.log" Oct 13 14:06:06 crc kubenswrapper[4684]: I1013 14:06:06.455536 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67567868d9-g9dwp_c04a3f8c-5d8c-4c33-8964-e31de4003949/init/0.log" Oct 13 14:06:06 crc kubenswrapper[4684]: I1013 14:06:06.492301 4684 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-28bmr_6c328864-8f33-4897-8fa7-9f0feee4fbf9/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:06 crc kubenswrapper[4684]: I1013 14:06:06.663430 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_80891967-a94b-4146-b440-cc217b235eee/glance-httpd/0.log" Oct 13 14:06:06 crc kubenswrapper[4684]: I1013 14:06:06.699970 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_80891967-a94b-4146-b440-cc217b235eee/glance-log/0.log" Oct 13 14:06:06 crc kubenswrapper[4684]: I1013 14:06:06.869796 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlmfj/crc-debug-nnxpg" Oct 13 14:06:06 crc kubenswrapper[4684]: I1013 14:06:06.890328 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_615ce132-dbef-40c4-afd3-871c94b552ed/glance-log/0.log" Oct 13 14:06:06 crc kubenswrapper[4684]: I1013 14:06:06.943769 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_615ce132-dbef-40c4-afd3-871c94b552ed/glance-httpd/0.log" Oct 13 14:06:06 crc kubenswrapper[4684]: I1013 14:06:06.987112 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlkbh\" (UniqueName: \"kubernetes.io/projected/252bb6df-1364-4a32-a606-85831bef3e2e-kube-api-access-dlkbh\") pod \"252bb6df-1364-4a32-a606-85831bef3e2e\" (UID: \"252bb6df-1364-4a32-a606-85831bef3e2e\") " Oct 13 14:06:06 crc kubenswrapper[4684]: I1013 14:06:06.987427 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/252bb6df-1364-4a32-a606-85831bef3e2e-host\") pod \"252bb6df-1364-4a32-a606-85831bef3e2e\" (UID: \"252bb6df-1364-4a32-a606-85831bef3e2e\") " Oct 13 14:06:06 crc kubenswrapper[4684]: I1013 14:06:06.987580 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/252bb6df-1364-4a32-a606-85831bef3e2e-host" (OuterVolumeSpecName: "host") pod "252bb6df-1364-4a32-a606-85831bef3e2e" (UID: "252bb6df-1364-4a32-a606-85831bef3e2e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 14:06:06 crc kubenswrapper[4684]: I1013 14:06:06.987975 4684 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/252bb6df-1364-4a32-a606-85831bef3e2e-host\") on node \"crc\" DevicePath \"\"" Oct 13 14:06:07 crc kubenswrapper[4684]: I1013 14:06:07.002653 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/252bb6df-1364-4a32-a606-85831bef3e2e-kube-api-access-dlkbh" (OuterVolumeSpecName: "kube-api-access-dlkbh") pod "252bb6df-1364-4a32-a606-85831bef3e2e" (UID: "252bb6df-1364-4a32-a606-85831bef3e2e"). InnerVolumeSpecName "kube-api-access-dlkbh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 14:06:07 crc kubenswrapper[4684]: I1013 14:06:07.066531 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-hj69t_7a51a7e3-30bb-4bce-889c-b13d919ef64c/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:07 crc kubenswrapper[4684]: I1013 14:06:07.091144 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlkbh\" (UniqueName: \"kubernetes.io/projected/252bb6df-1364-4a32-a606-85831bef3e2e-kube-api-access-dlkbh\") on node \"crc\" DevicePath \"\"" Oct 13 14:06:07 crc kubenswrapper[4684]: I1013 14:06:07.147567 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-kx22x_aabcf2cf-0d17-4864-a8f6-55220ed4c45c/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:07 crc kubenswrapper[4684]: I1013 14:06:07.341512 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29339401-8sg44_c58f1c18-97fd-4e58-a0e4-9bd84740c0f5/keystone-cron/0.log" Oct 13 14:06:07 crc kubenswrapper[4684]: I1013 14:06:07.432995 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-5c87fbbfdb-hnmd8_6b42944c-2de0-47a6-bdb1-70750adb4c3c/keystone-api/0.log" Oct 13 14:06:07 crc kubenswrapper[4684]: I1013 14:06:07.718351 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_09ad2e1c-80bb-4900-8c6b-346959ee7994/kube-state-metrics/0.log" Oct 13 14:06:07 crc kubenswrapper[4684]: I1013 14:06:07.762139 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-cj82r_4281de73-4320-444b-9d71-877c9cf226a0/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:07 crc kubenswrapper[4684]: I1013 14:06:07.774667 4684 scope.go:117] "RemoveContainer" containerID="fbadccd583c74850f6f565e06ca5d6f5de14e41fe29bb52d06fb0f6d6e3193ba" Oct 13 14:06:07 crc kubenswrapper[4684]: I1013 14:06:07.774729 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nlmfj/crc-debug-nnxpg" Oct 13 14:06:08 crc kubenswrapper[4684]: I1013 14:06:08.010645 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-594469df77-tzkgb_c1c77198-2e43-4230-a37d-48e65e09f466/neutron-api/0.log" Oct 13 14:06:08 crc kubenswrapper[4684]: I1013 14:06:08.068299 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-594469df77-tzkgb_c1c77198-2e43-4230-a37d-48e65e09f466/neutron-httpd/0.log" Oct 13 14:06:08 crc kubenswrapper[4684]: I1013 14:06:08.209586 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7_976ab3d8-44f4-4005-a286-439105b3d942/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:08 crc kubenswrapper[4684]: I1013 14:06:08.360728 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="252bb6df-1364-4a32-a606-85831bef3e2e" path="/var/lib/kubelet/pods/252bb6df-1364-4a32-a606-85831bef3e2e/volumes" Oct 13 14:06:08 crc kubenswrapper[4684]: I1013 14:06:08.770657 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_f4a657cc-d0c5-4c0d-8171-ee4acd5788ce/nova-cell0-conductor-conductor/0.log" Oct 13 14:06:08 crc kubenswrapper[4684]: I1013 14:06:08.787278 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_8332faf7-d7ae-421c-9f02-c17e93f044a5/nova-api-log/0.log" Oct 13 14:06:09 crc kubenswrapper[4684]: I1013 14:06:09.003190 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_8332faf7-d7ae-421c-9f02-c17e93f044a5/nova-api-api/0.log" Oct 13 14:06:09 crc kubenswrapper[4684]: I1013 14:06:09.019439 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_183e5916-8b09-4bef-85d2-ee83326d865d/nova-cell1-conductor-conductor/0.log" Oct 13 14:06:09 crc kubenswrapper[4684]: I1013 14:06:09.113132 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_15cff776-4185-4994-b294-c4ba8e704bb1/nova-cell1-novncproxy-novncproxy/0.log" Oct 13 14:06:09 crc kubenswrapper[4684]: I1013 14:06:09.392504 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-qxhq2_777b6ddf-59c7-4afc-841b-098fe5353aea/nova-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:09 crc kubenswrapper[4684]: I1013 14:06:09.417732 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_6ce23c3b-fb2a-4762-9a9e-f3581e9150ca/nova-metadata-log/0.log" Oct 13 14:06:09 crc kubenswrapper[4684]: I1013 14:06:09.693226 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_ab39473e-47b6-4570-b1f6-f81ee811c19f/mysql-bootstrap/0.log" Oct 13 14:06:09 crc kubenswrapper[4684]: I1013 14:06:09.735440 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_245c72eb-1ac1-4101-ae07-26a94bdae8e1/nova-scheduler-scheduler/0.log" Oct 13 14:06:09 crc kubenswrapper[4684]: I1013 14:06:09.930093 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_ab39473e-47b6-4570-b1f6-f81ee811c19f/mysql-bootstrap/0.log" Oct 13 14:06:09 crc kubenswrapper[4684]: I1013 14:06:09.981692 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_ab39473e-47b6-4570-b1f6-f81ee811c19f/galera/0.log" Oct 13 14:06:10 
crc kubenswrapper[4684]: I1013 14:06:10.146753 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_4d4341d2-642e-4c4c-b517-edb89e87d1f0/mysql-bootstrap/0.log" Oct 13 14:06:10 crc kubenswrapper[4684]: I1013 14:06:10.379343 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_4d4341d2-642e-4c4c-b517-edb89e87d1f0/galera/0.log" Oct 13 14:06:10 crc kubenswrapper[4684]: I1013 14:06:10.390105 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_4d4341d2-642e-4c4c-b517-edb89e87d1f0/mysql-bootstrap/0.log" Oct 13 14:06:10 crc kubenswrapper[4684]: I1013 14:06:10.593260 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_6ce23c3b-fb2a-4762-9a9e-f3581e9150ca/nova-metadata-metadata/0.log" Oct 13 14:06:10 crc kubenswrapper[4684]: I1013 14:06:10.631590 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_706a05c9-47ec-4b10-a5de-227f67b3be61/openstackclient/0.log" Oct 13 14:06:10 crc kubenswrapper[4684]: I1013 14:06:10.634084 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-jgthj_a984144e-7322-4045-a696-7ec4b746e061/ovn-controller/0.log" Oct 13 14:06:10 crc kubenswrapper[4684]: I1013 14:06:10.842098 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-fwm22_846b5496-bd3a-41b4-aef4-546dd2e85a83/openstack-network-exporter/0.log" Oct 13 14:06:10 crc kubenswrapper[4684]: I1013 14:06:10.870717 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-fc6hl_2b902966-f950-4166-b594-afacd52e5346/ovsdb-server-init/0.log" Oct 13 14:06:11 crc kubenswrapper[4684]: I1013 14:06:11.152788 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-fc6hl_2b902966-f950-4166-b594-afacd52e5346/ovsdb-server-init/0.log" Oct 13 14:06:11 crc kubenswrapper[4684]: I1013 14:06:11.197999 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-fc6hl_2b902966-f950-4166-b594-afacd52e5346/ovsdb-server/0.log" Oct 13 14:06:11 crc kubenswrapper[4684]: I1013 14:06:11.228653 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-fc6hl_2b902966-f950-4166-b594-afacd52e5346/ovs-vswitchd/0.log" Oct 13 14:06:11 crc kubenswrapper[4684]: I1013 14:06:11.447771 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-p8cjk_00a6e3dc-bdab-4eab-924c-37c33fecad3e/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:11 crc kubenswrapper[4684]: I1013 14:06:11.473820 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a0f64874-c6da-4e22-b3ba-29679844c1e2/openstack-network-exporter/0.log" Oct 13 14:06:11 crc kubenswrapper[4684]: I1013 14:06:11.486728 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a0f64874-c6da-4e22-b3ba-29679844c1e2/ovn-northd/0.log" Oct 13 14:06:11 crc kubenswrapper[4684]: I1013 14:06:11.664930 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_40f06c76-a8c8-4f1c-a8b4-49fe81d3912e/openstack-network-exporter/0.log" Oct 13 14:06:11 crc kubenswrapper[4684]: I1013 14:06:11.735893 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_40f06c76-a8c8-4f1c-a8b4-49fe81d3912e/ovsdbserver-nb/0.log" Oct 13 14:06:11 crc 
kubenswrapper[4684]: I1013 14:06:11.882114 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b199f677-58e1-4a16-b904-5517b06a2b5e/ovsdbserver-sb/0.log" Oct 13 14:06:11 crc kubenswrapper[4684]: I1013 14:06:11.897318 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b199f677-58e1-4a16-b904-5517b06a2b5e/openstack-network-exporter/0.log" Oct 13 14:06:11 crc kubenswrapper[4684]: I1013 14:06:11.975084 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5854cb58cb-4hhrx_a8cfa6a6-f09c-4903-a8ce-df37542f7fd2/placement-api/0.log" Oct 13 14:06:12 crc kubenswrapper[4684]: I1013 14:06:12.157575 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5854cb58cb-4hhrx_a8cfa6a6-f09c-4903-a8ce-df37542f7fd2/placement-log/0.log" Oct 13 14:06:12 crc kubenswrapper[4684]: I1013 14:06:12.217797 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_26f3f75b-5e4f-4f4c-b8ec-53352400c7ef/setup-container/0.log" Oct 13 14:06:12 crc kubenswrapper[4684]: I1013 14:06:12.429830 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_26f3f75b-5e4f-4f4c-b8ec-53352400c7ef/setup-container/0.log" Oct 13 14:06:12 crc kubenswrapper[4684]: I1013 14:06:12.446732 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_26f3f75b-5e4f-4f4c-b8ec-53352400c7ef/rabbitmq/0.log" Oct 13 14:06:12 crc kubenswrapper[4684]: I1013 14:06:12.461888 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7db9b5b9-e5be-4555-bed6-2fd9d9159b40/setup-container/0.log" Oct 13 14:06:12 crc kubenswrapper[4684]: I1013 14:06:12.663686 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7db9b5b9-e5be-4555-bed6-2fd9d9159b40/setup-container/0.log" Oct 13 14:06:12 crc kubenswrapper[4684]: I1013 14:06:12.682538 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7db9b5b9-e5be-4555-bed6-2fd9d9159b40/rabbitmq/0.log" Oct 13 14:06:12 crc kubenswrapper[4684]: I1013 14:06:12.737853 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl_0a757d2e-3642-4d64-9cf8-e0b29e43bbb9/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:12 crc kubenswrapper[4684]: I1013 14:06:12.897111 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-dcvr6_3af107fc-1ddc-4c90-80c1-c3fed25bddcb/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:13 crc kubenswrapper[4684]: I1013 14:06:13.024225 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt_24b2196a-b383-4c65-9c61-992b7305c6ea/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:13 crc kubenswrapper[4684]: I1013 14:06:13.218459 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-l9jlr_1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11/ssh-known-hosts-edpm-deployment/0.log" Oct 13 14:06:13 crc kubenswrapper[4684]: I1013 14:06:13.222472 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-8hz7n_6a0d23bf-ab14-453f-b23c-eebd64623b73/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:13 crc kubenswrapper[4684]: 
I1013 14:06:13.455046 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-56f9bb58f9-k8bsc_61ca624d-dfba-4a64-b08f-e96cc583a2b8/proxy-server/0.log" Oct 13 14:06:13 crc kubenswrapper[4684]: I1013 14:06:13.565833 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-56f9bb58f9-k8bsc_61ca624d-dfba-4a64-b08f-e96cc583a2b8/proxy-httpd/0.log" Oct 13 14:06:13 crc kubenswrapper[4684]: I1013 14:06:13.566466 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-m58km_b1d3d985-3822-439b-9e3b-629629e83b34/swift-ring-rebalance/0.log" Oct 13 14:06:13 crc kubenswrapper[4684]: I1013 14:06:13.801206 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/account-auditor/0.log" Oct 13 14:06:13 crc kubenswrapper[4684]: I1013 14:06:13.825849 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/account-reaper/0.log" Oct 13 14:06:13 crc kubenswrapper[4684]: I1013 14:06:13.836437 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/account-replicator/0.log" Oct 13 14:06:13 crc kubenswrapper[4684]: I1013 14:06:13.938337 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/account-server/0.log" Oct 13 14:06:13 crc kubenswrapper[4684]: I1013 14:06:13.983473 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/container-auditor/0.log" Oct 13 14:06:14 crc kubenswrapper[4684]: I1013 14:06:14.009628 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/container-server/0.log" Oct 13 14:06:14 crc kubenswrapper[4684]: I1013 14:06:14.076234 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/container-replicator/0.log" Oct 13 14:06:14 crc kubenswrapper[4684]: I1013 14:06:14.200163 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/container-updater/0.log" Oct 13 14:06:14 crc kubenswrapper[4684]: I1013 14:06:14.204278 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/object-auditor/0.log" Oct 13 14:06:14 crc kubenswrapper[4684]: I1013 14:06:14.205797 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/object-expirer/0.log" Oct 13 14:06:14 crc kubenswrapper[4684]: I1013 14:06:14.343858 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/object-replicator/0.log" Oct 13 14:06:14 crc kubenswrapper[4684]: I1013 14:06:14.390674 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/object-updater/0.log" Oct 13 14:06:14 crc kubenswrapper[4684]: I1013 14:06:14.426259 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/object-server/0.log" Oct 13 14:06:14 crc kubenswrapper[4684]: I1013 14:06:14.458223 4684 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/rsync/0.log" Oct 13 14:06:14 crc kubenswrapper[4684]: I1013 14:06:14.611372 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/swift-recon-cron/0.log" Oct 13 14:06:14 crc kubenswrapper[4684]: I1013 14:06:14.683041 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-4zsck_5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:14 crc kubenswrapper[4684]: I1013 14:06:14.853398 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_cba49bf6-7402-47c1-bc2d-fc49dc6b0e30/tempest-tests-tempest-tests-runner/0.log" Oct 13 14:06:14 crc kubenswrapper[4684]: I1013 14:06:14.905314 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_77c83104-d58a-4883-ad94-dfdca3feb2d7/test-operator-logs-container/0.log" Oct 13 14:06:15 crc kubenswrapper[4684]: I1013 14:06:15.059338 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx_12fac5a6-e3fd-4017-9eee-36aa43193b0c/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 14:06:23 crc kubenswrapper[4684]: I1013 14:06:23.503480 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_73377942-2512-4398-b6ca-25aa9a591619/memcached/0.log" Oct 13 14:06:30 crc kubenswrapper[4684]: I1013 14:06:30.560348 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 14:06:30 crc kubenswrapper[4684]: I1013 14:06:30.560935 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 14:06:37 crc kubenswrapper[4684]: I1013 14:06:37.234153 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp_d86efc0c-a787-4c60-9ed0-3ffcde968316/util/0.log" Oct 13 14:06:37 crc kubenswrapper[4684]: I1013 14:06:37.458755 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp_d86efc0c-a787-4c60-9ed0-3ffcde968316/pull/0.log" Oct 13 14:06:37 crc kubenswrapper[4684]: I1013 14:06:37.476575 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp_d86efc0c-a787-4c60-9ed0-3ffcde968316/pull/0.log" Oct 13 14:06:37 crc kubenswrapper[4684]: I1013 14:06:37.482416 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp_d86efc0c-a787-4c60-9ed0-3ffcde968316/util/0.log" Oct 13 14:06:37 crc kubenswrapper[4684]: I1013 14:06:37.683732 4684 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp_d86efc0c-a787-4c60-9ed0-3ffcde968316/pull/0.log" Oct 13 14:06:37 crc kubenswrapper[4684]: I1013 14:06:37.701861 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp_d86efc0c-a787-4c60-9ed0-3ffcde968316/util/0.log" Oct 13 14:06:37 crc kubenswrapper[4684]: I1013 14:06:37.723345 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp_d86efc0c-a787-4c60-9ed0-3ffcde968316/extract/0.log" Oct 13 14:06:37 crc kubenswrapper[4684]: I1013 14:06:37.872580 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-658bdf4b74-6mg9x_b3b72bfd-179d-4a2c-bbcf-eb318658886d/kube-rbac-proxy/0.log" Oct 13 14:06:37 crc kubenswrapper[4684]: I1013 14:06:37.931020 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-658bdf4b74-6mg9x_b3b72bfd-179d-4a2c-bbcf-eb318658886d/manager/0.log" Oct 13 14:06:37 crc kubenswrapper[4684]: I1013 14:06:37.952382 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7b7fb68549-fb6dz_5db0ac5c-8b11-488c-8be7-14b040ddee3b/kube-rbac-proxy/0.log" Oct 13 14:06:38 crc kubenswrapper[4684]: I1013 14:06:38.094834 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7b7fb68549-fb6dz_5db0ac5c-8b11-488c-8be7-14b040ddee3b/manager/0.log" Oct 13 14:06:38 crc kubenswrapper[4684]: I1013 14:06:38.145402 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-85d5d9dd78-flhrp_67029ab5-dc79-4300-acc7-2e4ab2115809/kube-rbac-proxy/0.log" Oct 13 14:06:38 crc kubenswrapper[4684]: I1013 14:06:38.197003 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-85d5d9dd78-flhrp_67029ab5-dc79-4300-acc7-2e4ab2115809/manager/0.log" Oct 13 14:06:38 crc kubenswrapper[4684]: I1013 14:06:38.312874 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84b9b84486-rn562_be62f268-d474-44c4-847d-3f43f4e3b724/kube-rbac-proxy/0.log" Oct 13 14:06:38 crc kubenswrapper[4684]: I1013 14:06:38.429738 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84b9b84486-rn562_be62f268-d474-44c4-847d-3f43f4e3b724/manager/0.log" Oct 13 14:06:38 crc kubenswrapper[4684]: I1013 14:06:38.524120 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-858f76bbdd-ggcts_23dafef9-64c0-4b93-b9e6-4c5d00a94a37/kube-rbac-proxy/0.log" Oct 13 14:06:38 crc kubenswrapper[4684]: I1013 14:06:38.557672 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-858f76bbdd-ggcts_23dafef9-64c0-4b93-b9e6-4c5d00a94a37/manager/0.log" Oct 13 14:06:38 crc kubenswrapper[4684]: I1013 14:06:38.640701 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7ffbcb7588-k9gct_f5eb77ba-26c3-431f-88aa-43c9ca4e1137/kube-rbac-proxy/0.log" Oct 13 14:06:38 crc kubenswrapper[4684]: I1013 14:06:38.716966 
4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7ffbcb7588-k9gct_f5eb77ba-26c3-431f-88aa-43c9ca4e1137/manager/0.log" Oct 13 14:06:38 crc kubenswrapper[4684]: I1013 14:06:38.820428 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-656bcbd775-cdp57_de604670-9b37-401b-a41e-de24f939ddfa/kube-rbac-proxy/0.log" Oct 13 14:06:39 crc kubenswrapper[4684]: I1013 14:06:39.026840 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-656bcbd775-cdp57_de604670-9b37-401b-a41e-de24f939ddfa/manager/0.log" Oct 13 14:06:39 crc kubenswrapper[4684]: I1013 14:06:39.051779 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-9c5c78d49-fpcxc_a89d3329-f603-48e0-a781-726e723ddba2/kube-rbac-proxy/0.log" Oct 13 14:06:39 crc kubenswrapper[4684]: I1013 14:06:39.085668 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-9c5c78d49-fpcxc_a89d3329-f603-48e0-a781-726e723ddba2/manager/0.log" Oct 13 14:06:39 crc kubenswrapper[4684]: I1013 14:06:39.198102 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-55b6b7c7b8-nw27r_c4cff4cc-1db4-4c49-821e-c24204a45224/kube-rbac-proxy/0.log" Oct 13 14:06:39 crc kubenswrapper[4684]: I1013 14:06:39.314630 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-55b6b7c7b8-nw27r_c4cff4cc-1db4-4c49-821e-c24204a45224/manager/0.log" Oct 13 14:06:39 crc kubenswrapper[4684]: I1013 14:06:39.373627 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5f67fbc655-wmg4z_738e9416-e337-4ae6-89e3-40ce9e1843b6/kube-rbac-proxy/0.log" Oct 13 14:06:39 crc kubenswrapper[4684]: I1013 14:06:39.454307 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5f67fbc655-wmg4z_738e9416-e337-4ae6-89e3-40ce9e1843b6/manager/0.log" Oct 13 14:06:39 crc kubenswrapper[4684]: I1013 14:06:39.553388 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-f9fb45f8f-l2r6f_d99f99c5-94dd-4239-a410-b4983e91974f/kube-rbac-proxy/0.log" Oct 13 14:06:39 crc kubenswrapper[4684]: I1013 14:06:39.580675 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-f9fb45f8f-l2r6f_d99f99c5-94dd-4239-a410-b4983e91974f/manager/0.log" Oct 13 14:06:39 crc kubenswrapper[4684]: I1013 14:06:39.731088 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-79d585cb66-cmd4z_210a7413-8bab-4428-adc6-be6bb0037a24/kube-rbac-proxy/0.log" Oct 13 14:06:39 crc kubenswrapper[4684]: I1013 14:06:39.804741 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-79d585cb66-cmd4z_210a7413-8bab-4428-adc6-be6bb0037a24/manager/0.log" Oct 13 14:06:39 crc kubenswrapper[4684]: I1013 14:06:39.826604 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5df598886f-nrqjb_d6bea786-6aa7-4ad2-ae0f-5b61f4bde746/kube-rbac-proxy/0.log" Oct 13 14:06:39 crc kubenswrapper[4684]: I1013 
14:06:39.978572 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5df598886f-nrqjb_d6bea786-6aa7-4ad2-ae0f-5b61f4bde746/manager/0.log" Oct 13 14:06:39 crc kubenswrapper[4684]: I1013 14:06:39.990309 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-69fdcfc5f5-hhc52_f9b4d8c9-c763-42ce-bb43-3661fd211396/kube-rbac-proxy/0.log" Oct 13 14:06:40 crc kubenswrapper[4684]: I1013 14:06:40.059915 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-69fdcfc5f5-hhc52_f9b4d8c9-c763-42ce-bb43-3661fd211396/manager/0.log" Oct 13 14:06:40 crc kubenswrapper[4684]: I1013 14:06:40.201014 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-55b7d44848pz9s5_a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8/kube-rbac-proxy/0.log" Oct 13 14:06:40 crc kubenswrapper[4684]: I1013 14:06:40.299241 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-55b7d44848pz9s5_a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8/manager/0.log" Oct 13 14:06:40 crc kubenswrapper[4684]: I1013 14:06:40.352667 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7fb8c88b76-kqqbz_a55aa929-2380-4b40-8d61-a9c00ea48536/kube-rbac-proxy/0.log" Oct 13 14:06:40 crc kubenswrapper[4684]: I1013 14:06:40.728226 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-64895cd698-7gflz_a5d7b84e-4a82-4671-b69f-ec15f4446875/kube-rbac-proxy/0.log" Oct 13 14:06:40 crc kubenswrapper[4684]: I1013 14:06:40.907959 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-64895cd698-7gflz_a5d7b84e-4a82-4671-b69f-ec15f4446875/operator/0.log" Oct 13 14:06:41 crc kubenswrapper[4684]: I1013 14:06:41.018986 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-6rh8x_f355c9e7-5235-4ab1-891a-006f5c66de34/registry-server/0.log" Oct 13 14:06:41 crc kubenswrapper[4684]: I1013 14:06:41.170166 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-79df5fb58c-rjcff_fc06efe2-69bb-4b8c-bade-ac2f86aedf8f/kube-rbac-proxy/0.log" Oct 13 14:06:41 crc kubenswrapper[4684]: I1013 14:06:41.305077 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-68b6c87b68-l4m6r_1ea7dea4-73ae-4444-a1e8-9704cb1f32e5/kube-rbac-proxy/0.log" Oct 13 14:06:41 crc kubenswrapper[4684]: I1013 14:06:41.343192 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-79df5fb58c-rjcff_fc06efe2-69bb-4b8c-bade-ac2f86aedf8f/manager/0.log" Oct 13 14:06:41 crc kubenswrapper[4684]: I1013 14:06:41.476120 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-68b6c87b68-l4m6r_1ea7dea4-73ae-4444-a1e8-9704cb1f32e5/manager/0.log" Oct 13 14:06:41 crc kubenswrapper[4684]: I1013 14:06:41.490854 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-2bq8n_495f3276-fe48-4709-9e4e-2e57fbf3a5a6/operator/0.log" Oct 13 14:06:41 
crc kubenswrapper[4684]: I1013 14:06:41.492306 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7fb8c88b76-kqqbz_a55aa929-2380-4b40-8d61-a9c00ea48536/manager/0.log" Oct 13 14:06:41 crc kubenswrapper[4684]: I1013 14:06:41.734287 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-db6d7f97b-zbvph_77101b92-a0b5-4715-b426-918edba4833c/kube-rbac-proxy/0.log" Oct 13 14:06:41 crc kubenswrapper[4684]: I1013 14:06:41.740178 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-67cfc6749b-jjkmq_8fb7300d-97a2-4116-bc41-da6d686a12a1/kube-rbac-proxy/0.log" Oct 13 14:06:41 crc kubenswrapper[4684]: I1013 14:06:41.780735 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-db6d7f97b-zbvph_77101b92-a0b5-4715-b426-918edba4833c/manager/0.log" Oct 13 14:06:41 crc kubenswrapper[4684]: I1013 14:06:41.855868 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-67cfc6749b-jjkmq_8fb7300d-97a2-4116-bc41-da6d686a12a1/manager/0.log" Oct 13 14:06:41 crc kubenswrapper[4684]: I1013 14:06:41.930537 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5458f77c4-9cknq_34fad128-5332-4209-ba15-19cc84ccedb6/kube-rbac-proxy/0.log" Oct 13 14:06:41 crc kubenswrapper[4684]: I1013 14:06:41.973164 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5458f77c4-9cknq_34fad128-5332-4209-ba15-19cc84ccedb6/manager/0.log" Oct 13 14:06:42 crc kubenswrapper[4684]: I1013 14:06:42.087951 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7f554bff7b-52z6n_ba30b561-090d-4067-98e4-215b0c717ce2/kube-rbac-proxy/0.log" Oct 13 14:06:42 crc kubenswrapper[4684]: I1013 14:06:42.113005 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7f554bff7b-52z6n_ba30b561-090d-4067-98e4-215b0c717ce2/manager/0.log" Oct 13 14:06:58 crc kubenswrapper[4684]: I1013 14:06:58.514331 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-ptz8b_de6c8979-1887-4e08-9439-f6654ced778f/control-plane-machine-set-operator/0.log" Oct 13 14:06:58 crc kubenswrapper[4684]: I1013 14:06:58.762791 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-87shb_bf558883-5672-46d6-9d8a-a08070751a86/machine-api-operator/0.log" Oct 13 14:06:58 crc kubenswrapper[4684]: I1013 14:06:58.765445 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-87shb_bf558883-5672-46d6-9d8a-a08070751a86/kube-rbac-proxy/0.log" Oct 13 14:07:00 crc kubenswrapper[4684]: I1013 14:07:00.560258 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 14:07:00 crc kubenswrapper[4684]: I1013 14:07:00.560520 4684 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 14:07:00 crc kubenswrapper[4684]: I1013 14:07:00.560564 4684 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 14:07:00 crc kubenswrapper[4684]: I1013 14:07:00.561260 4684 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7"} pod="openshift-machine-config-operator/machine-config-daemon-wns5s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 14:07:00 crc kubenswrapper[4684]: I1013 14:07:00.561312 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" containerID="cri-o://d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" gracePeriod=600 Oct 13 14:07:00 crc kubenswrapper[4684]: E1013 14:07:00.690874 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:07:01 crc kubenswrapper[4684]: I1013 14:07:01.262252 4684 generic.go:334] "Generic (PLEG): container finished" podID="e54ad64a-6df7-4082-afde-d56463121b3f" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" exitCode=0 Oct 13 14:07:01 crc kubenswrapper[4684]: I1013 14:07:01.262294 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerDied","Data":"d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7"} Oct 13 14:07:01 crc kubenswrapper[4684]: I1013 14:07:01.262369 4684 scope.go:117] "RemoveContainer" containerID="8f039b205cc277f559160266927df66f1bc5c5103396973c169e80ddca9064c9" Oct 13 14:07:01 crc kubenswrapper[4684]: I1013 14:07:01.264147 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:07:01 crc kubenswrapper[4684]: E1013 14:07:01.264613 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:07:10 crc kubenswrapper[4684]: I1013 14:07:10.340777 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-4xcpj_f65d7f2f-54ed-4d01-b143-82edecc32788/cert-manager-controller/0.log" Oct 13 14:07:10 crc kubenswrapper[4684]: I1013 14:07:10.483847 4684 
log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-58lpp_d50e48e4-4e31-4a3b-bf2f-69dcbfdf8ef4/cert-manager-cainjector/0.log" Oct 13 14:07:10 crc kubenswrapper[4684]: I1013 14:07:10.536639 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-ldckd_5689e64c-b30a-4009-ad81-2ace50352b94/cert-manager-webhook/0.log" Oct 13 14:07:12 crc kubenswrapper[4684]: I1013 14:07:12.351862 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:07:12 crc kubenswrapper[4684]: E1013 14:07:12.352704 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:07:22 crc kubenswrapper[4684]: I1013 14:07:22.056509 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-vkcdq_5608900f-c09f-4810-bde2-87588dadfe55/nmstate-console-plugin/0.log" Oct 13 14:07:22 crc kubenswrapper[4684]: I1013 14:07:22.196723 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-xsv76_1fd2ac9e-de15-4598-a050-44f58f8e8a50/nmstate-handler/0.log" Oct 13 14:07:22 crc kubenswrapper[4684]: I1013 14:07:22.253886 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-r2xpx_aad74a49-543f-4f01-a171-0ca010ba7319/kube-rbac-proxy/0.log" Oct 13 14:07:22 crc kubenswrapper[4684]: I1013 14:07:22.298503 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-r2xpx_aad74a49-543f-4f01-a171-0ca010ba7319/nmstate-metrics/0.log" Oct 13 14:07:22 crc kubenswrapper[4684]: I1013 14:07:22.416119 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-c68bl_bf38af4f-4552-46e1-8011-0e8924331c2f/nmstate-operator/0.log" Oct 13 14:07:22 crc kubenswrapper[4684]: I1013 14:07:22.457261 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-hw69f_a6d3dd3f-7c9d-45b9-8f81-dd85f2554caa/nmstate-webhook/0.log" Oct 13 14:07:27 crc kubenswrapper[4684]: I1013 14:07:27.351723 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:07:27 crc kubenswrapper[4684]: E1013 14:07:27.352682 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:07:35 crc kubenswrapper[4684]: I1013 14:07:35.373134 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-85wm6_ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac/kube-rbac-proxy/0.log" Oct 13 14:07:35 crc kubenswrapper[4684]: I1013 14:07:35.455653 4684 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_controller-68d546b9d8-85wm6_ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac/controller/0.log" Oct 13 14:07:35 crc kubenswrapper[4684]: I1013 14:07:35.621405 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-frr-files/0.log" Oct 13 14:07:35 crc kubenswrapper[4684]: I1013 14:07:35.625343 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-nwgv6_642eea78-8230-4b4a-b1f3-1a96d1d8942f/frr-k8s-webhook-server/0.log" Oct 13 14:07:35 crc kubenswrapper[4684]: I1013 14:07:35.820271 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-frr-files/0.log" Oct 13 14:07:35 crc kubenswrapper[4684]: I1013 14:07:35.863827 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-metrics/0.log" Oct 13 14:07:35 crc kubenswrapper[4684]: I1013 14:07:35.882864 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-reloader/0.log" Oct 13 14:07:35 crc kubenswrapper[4684]: I1013 14:07:35.882872 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-reloader/0.log" Oct 13 14:07:36 crc kubenswrapper[4684]: I1013 14:07:36.051162 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-frr-files/0.log" Oct 13 14:07:36 crc kubenswrapper[4684]: I1013 14:07:36.080081 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-metrics/0.log" Oct 13 14:07:36 crc kubenswrapper[4684]: I1013 14:07:36.081850 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-reloader/0.log" Oct 13 14:07:36 crc kubenswrapper[4684]: I1013 14:07:36.098161 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-metrics/0.log" Oct 13 14:07:36 crc kubenswrapper[4684]: I1013 14:07:36.264268 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-metrics/0.log" Oct 13 14:07:36 crc kubenswrapper[4684]: I1013 14:07:36.264499 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-frr-files/0.log" Oct 13 14:07:36 crc kubenswrapper[4684]: I1013 14:07:36.273285 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-reloader/0.log" Oct 13 14:07:36 crc kubenswrapper[4684]: I1013 14:07:36.330006 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/controller/0.log" Oct 13 14:07:36 crc kubenswrapper[4684]: I1013 14:07:36.440887 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/frr-metrics/0.log" Oct 13 14:07:36 crc kubenswrapper[4684]: I1013 14:07:36.479195 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/kube-rbac-proxy/0.log" Oct 
13 14:07:36 crc kubenswrapper[4684]: I1013 14:07:36.536772 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/kube-rbac-proxy-frr/0.log" Oct 13 14:07:36 crc kubenswrapper[4684]: I1013 14:07:36.643720 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/reloader/0.log" Oct 13 14:07:36 crc kubenswrapper[4684]: I1013 14:07:36.771227 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-745c4c95f5-j54v2_c231fd74-600a-46ff-ba30-605a9445b002/manager/0.log" Oct 13 14:07:36 crc kubenswrapper[4684]: I1013 14:07:36.950972 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6cbcf8c94c-hkxm8_b2944a80-992b-4799-a461-82f7c2398295/webhook-server/0.log" Oct 13 14:07:37 crc kubenswrapper[4684]: I1013 14:07:37.146364 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-hprww_2ff5f48b-1277-4a47-af89-e71172d731d6/kube-rbac-proxy/0.log" Oct 13 14:07:37 crc kubenswrapper[4684]: I1013 14:07:37.608314 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-hprww_2ff5f48b-1277-4a47-af89-e71172d731d6/speaker/0.log" Oct 13 14:07:37 crc kubenswrapper[4684]: I1013 14:07:37.985662 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/frr/0.log" Oct 13 14:07:41 crc kubenswrapper[4684]: I1013 14:07:41.350241 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:07:41 crc kubenswrapper[4684]: E1013 14:07:41.350934 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:07:49 crc kubenswrapper[4684]: I1013 14:07:49.317425 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks_bcc2f7fe-8648-4e50-946b-b0792d150f63/util/0.log" Oct 13 14:07:49 crc kubenswrapper[4684]: I1013 14:07:49.480317 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks_bcc2f7fe-8648-4e50-946b-b0792d150f63/util/0.log" Oct 13 14:07:49 crc kubenswrapper[4684]: I1013 14:07:49.521069 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks_bcc2f7fe-8648-4e50-946b-b0792d150f63/pull/0.log" Oct 13 14:07:49 crc kubenswrapper[4684]: I1013 14:07:49.533041 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks_bcc2f7fe-8648-4e50-946b-b0792d150f63/pull/0.log" Oct 13 14:07:49 crc kubenswrapper[4684]: I1013 14:07:49.734062 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks_bcc2f7fe-8648-4e50-946b-b0792d150f63/pull/0.log" Oct 13 14:07:49 crc 
kubenswrapper[4684]: I1013 14:07:49.759451 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks_bcc2f7fe-8648-4e50-946b-b0792d150f63/util/0.log" Oct 13 14:07:49 crc kubenswrapper[4684]: I1013 14:07:49.778769 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks_bcc2f7fe-8648-4e50-946b-b0792d150f63/extract/0.log" Oct 13 14:07:49 crc kubenswrapper[4684]: I1013 14:07:49.899307 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dqll_e5497e7c-0895-4f48-bc23-89ed3b1b5fb5/extract-utilities/0.log" Oct 13 14:07:50 crc kubenswrapper[4684]: I1013 14:07:50.266715 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dqll_e5497e7c-0895-4f48-bc23-89ed3b1b5fb5/extract-content/0.log" Oct 13 14:07:50 crc kubenswrapper[4684]: I1013 14:07:50.276880 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dqll_e5497e7c-0895-4f48-bc23-89ed3b1b5fb5/extract-content/0.log" Oct 13 14:07:50 crc kubenswrapper[4684]: I1013 14:07:50.323947 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dqll_e5497e7c-0895-4f48-bc23-89ed3b1b5fb5/extract-utilities/0.log" Oct 13 14:07:50 crc kubenswrapper[4684]: I1013 14:07:50.532195 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dqll_e5497e7c-0895-4f48-bc23-89ed3b1b5fb5/extract-content/0.log" Oct 13 14:07:50 crc kubenswrapper[4684]: I1013 14:07:50.535261 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dqll_e5497e7c-0895-4f48-bc23-89ed3b1b5fb5/extract-utilities/0.log" Oct 13 14:07:50 crc kubenswrapper[4684]: I1013 14:07:50.740399 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8spk8_4262fc7d-7332-42ac-9a54-06e4dc3c4d46/extract-utilities/0.log" Oct 13 14:07:51 crc kubenswrapper[4684]: I1013 14:07:51.040853 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dqll_e5497e7c-0895-4f48-bc23-89ed3b1b5fb5/registry-server/0.log" Oct 13 14:07:51 crc kubenswrapper[4684]: I1013 14:07:51.051485 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8spk8_4262fc7d-7332-42ac-9a54-06e4dc3c4d46/extract-utilities/0.log" Oct 13 14:07:51 crc kubenswrapper[4684]: I1013 14:07:51.068929 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8spk8_4262fc7d-7332-42ac-9a54-06e4dc3c4d46/extract-content/0.log" Oct 13 14:07:51 crc kubenswrapper[4684]: I1013 14:07:51.194394 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8spk8_4262fc7d-7332-42ac-9a54-06e4dc3c4d46/extract-content/0.log" Oct 13 14:07:51 crc kubenswrapper[4684]: I1013 14:07:51.285443 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8spk8_4262fc7d-7332-42ac-9a54-06e4dc3c4d46/extract-utilities/0.log" Oct 13 14:07:51 crc kubenswrapper[4684]: I1013 14:07:51.351754 4684 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-8spk8_4262fc7d-7332-42ac-9a54-06e4dc3c4d46/extract-content/0.log" Oct 13 14:07:51 crc kubenswrapper[4684]: I1013 14:07:51.544333 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt_03260d40-6a40-406f-b8a9-7898ae8a3b16/util/0.log" Oct 13 14:07:51 crc kubenswrapper[4684]: I1013 14:07:51.746521 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt_03260d40-6a40-406f-b8a9-7898ae8a3b16/pull/0.log" Oct 13 14:07:51 crc kubenswrapper[4684]: I1013 14:07:51.748066 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt_03260d40-6a40-406f-b8a9-7898ae8a3b16/pull/0.log" Oct 13 14:07:51 crc kubenswrapper[4684]: I1013 14:07:51.763690 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8spk8_4262fc7d-7332-42ac-9a54-06e4dc3c4d46/registry-server/0.log" Oct 13 14:07:51 crc kubenswrapper[4684]: I1013 14:07:51.800105 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt_03260d40-6a40-406f-b8a9-7898ae8a3b16/util/0.log" Oct 13 14:07:51 crc kubenswrapper[4684]: I1013 14:07:51.970110 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt_03260d40-6a40-406f-b8a9-7898ae8a3b16/extract/0.log" Oct 13 14:07:51 crc kubenswrapper[4684]: I1013 14:07:51.982422 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt_03260d40-6a40-406f-b8a9-7898ae8a3b16/pull/0.log" Oct 13 14:07:51 crc kubenswrapper[4684]: I1013 14:07:51.986004 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt_03260d40-6a40-406f-b8a9-7898ae8a3b16/util/0.log" Oct 13 14:07:52 crc kubenswrapper[4684]: I1013 14:07:52.225549 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-6t588_cc8221bd-07d6-42ed-b1dd-d81881844b60/marketplace-operator/0.log" Oct 13 14:07:52 crc kubenswrapper[4684]: I1013 14:07:52.251586 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h5qdd_4fc769db-2b2f-431c-bef7-1b3c46fe628c/extract-utilities/0.log" Oct 13 14:07:52 crc kubenswrapper[4684]: I1013 14:07:52.344693 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h5qdd_4fc769db-2b2f-431c-bef7-1b3c46fe628c/extract-utilities/0.log" Oct 13 14:07:52 crc kubenswrapper[4684]: I1013 14:07:52.419070 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h5qdd_4fc769db-2b2f-431c-bef7-1b3c46fe628c/extract-content/0.log" Oct 13 14:07:52 crc kubenswrapper[4684]: I1013 14:07:52.442981 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h5qdd_4fc769db-2b2f-431c-bef7-1b3c46fe628c/extract-content/0.log" Oct 13 14:07:52 crc kubenswrapper[4684]: I1013 14:07:52.602867 4684 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-h5qdd_4fc769db-2b2f-431c-bef7-1b3c46fe628c/extract-utilities/0.log" Oct 13 14:07:52 crc kubenswrapper[4684]: I1013 14:07:52.610936 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h5qdd_4fc769db-2b2f-431c-bef7-1b3c46fe628c/extract-content/0.log" Oct 13 14:07:52 crc kubenswrapper[4684]: I1013 14:07:52.771701 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h5qdd_4fc769db-2b2f-431c-bef7-1b3c46fe628c/registry-server/0.log" Oct 13 14:07:52 crc kubenswrapper[4684]: I1013 14:07:52.792328 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qzj9z_ef7652d6-3ac3-4739-a190-5b071c4d0839/extract-utilities/0.log" Oct 13 14:07:53 crc kubenswrapper[4684]: I1013 14:07:53.004927 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qzj9z_ef7652d6-3ac3-4739-a190-5b071c4d0839/extract-utilities/0.log" Oct 13 14:07:53 crc kubenswrapper[4684]: I1013 14:07:53.009438 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qzj9z_ef7652d6-3ac3-4739-a190-5b071c4d0839/extract-content/0.log" Oct 13 14:07:53 crc kubenswrapper[4684]: I1013 14:07:53.045253 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qzj9z_ef7652d6-3ac3-4739-a190-5b071c4d0839/extract-content/0.log" Oct 13 14:07:53 crc kubenswrapper[4684]: I1013 14:07:53.204996 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qzj9z_ef7652d6-3ac3-4739-a190-5b071c4d0839/extract-utilities/0.log" Oct 13 14:07:53 crc kubenswrapper[4684]: I1013 14:07:53.205259 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qzj9z_ef7652d6-3ac3-4739-a190-5b071c4d0839/extract-content/0.log" Oct 13 14:07:53 crc kubenswrapper[4684]: I1013 14:07:53.766526 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qzj9z_ef7652d6-3ac3-4739-a190-5b071c4d0839/registry-server/0.log" Oct 13 14:07:54 crc kubenswrapper[4684]: I1013 14:07:54.351699 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:07:54 crc kubenswrapper[4684]: E1013 14:07:54.351887 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:08:05 crc kubenswrapper[4684]: I1013 14:08:05.351198 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:08:05 crc kubenswrapper[4684]: E1013 14:08:05.351940 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" 
podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:08:19 crc kubenswrapper[4684]: I1013 14:08:19.350932 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:08:19 crc kubenswrapper[4684]: E1013 14:08:19.352316 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:08:32 crc kubenswrapper[4684]: I1013 14:08:32.351579 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:08:32 crc kubenswrapper[4684]: E1013 14:08:32.352668 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:08:47 crc kubenswrapper[4684]: I1013 14:08:47.351169 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:08:47 crc kubenswrapper[4684]: E1013 14:08:47.351868 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:09:02 crc kubenswrapper[4684]: I1013 14:09:02.351318 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:09:02 crc kubenswrapper[4684]: E1013 14:09:02.352303 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:09:13 crc kubenswrapper[4684]: I1013 14:09:13.350713 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:09:13 crc kubenswrapper[4684]: E1013 14:09:13.351412 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:09:27 crc kubenswrapper[4684]: I1013 14:09:27.351288 4684 scope.go:117] "RemoveContainer" 
containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:09:27 crc kubenswrapper[4684]: E1013 14:09:27.352114 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:09:28 crc kubenswrapper[4684]: I1013 14:09:28.727611 4684 generic.go:334] "Generic (PLEG): container finished" podID="da586989-4b48-4d87-9aa7-64c80eb810cb" containerID="21ec255dded6e7df1d0398295d8e4327195faad60de6e1f22e7656d547aa4d82" exitCode=0 Oct 13 14:09:28 crc kubenswrapper[4684]: I1013 14:09:28.727729 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlmfj/must-gather-8lvtv" event={"ID":"da586989-4b48-4d87-9aa7-64c80eb810cb","Type":"ContainerDied","Data":"21ec255dded6e7df1d0398295d8e4327195faad60de6e1f22e7656d547aa4d82"} Oct 13 14:09:28 crc kubenswrapper[4684]: I1013 14:09:28.728246 4684 scope.go:117] "RemoveContainer" containerID="21ec255dded6e7df1d0398295d8e4327195faad60de6e1f22e7656d547aa4d82" Oct 13 14:09:29 crc kubenswrapper[4684]: I1013 14:09:29.180017 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nlmfj_must-gather-8lvtv_da586989-4b48-4d87-9aa7-64c80eb810cb/gather/0.log" Oct 13 14:09:31 crc kubenswrapper[4684]: E1013 14:09:31.067320 4684 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.212:53862->38.102.83.212:42087: write tcp 38.102.83.212:53862->38.102.83.212:42087: write: broken pipe Oct 13 14:09:36 crc kubenswrapper[4684]: I1013 14:09:36.508013 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nlmfj/must-gather-8lvtv"] Oct 13 14:09:36 crc kubenswrapper[4684]: I1013 14:09:36.509773 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-nlmfj/must-gather-8lvtv" podUID="da586989-4b48-4d87-9aa7-64c80eb810cb" containerName="copy" containerID="cri-o://7c658129b513d415745299380e5a6604a322379dc9db056d832905a39f9cd77f" gracePeriod=2 Oct 13 14:09:36 crc kubenswrapper[4684]: I1013 14:09:36.517824 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nlmfj/must-gather-8lvtv"] Oct 13 14:09:36 crc kubenswrapper[4684]: I1013 14:09:36.834404 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nlmfj_must-gather-8lvtv_da586989-4b48-4d87-9aa7-64c80eb810cb/copy/0.log" Oct 13 14:09:36 crc kubenswrapper[4684]: I1013 14:09:36.835122 4684 generic.go:334] "Generic (PLEG): container finished" podID="da586989-4b48-4d87-9aa7-64c80eb810cb" containerID="7c658129b513d415745299380e5a6604a322379dc9db056d832905a39f9cd77f" exitCode=143 Oct 13 14:09:36 crc kubenswrapper[4684]: I1013 14:09:36.911214 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nlmfj_must-gather-8lvtv_da586989-4b48-4d87-9aa7-64c80eb810cb/copy/0.log" Oct 13 14:09:36 crc kubenswrapper[4684]: I1013 14:09:36.912065 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nlmfj/must-gather-8lvtv" Oct 13 14:09:37 crc kubenswrapper[4684]: I1013 14:09:37.020012 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/da586989-4b48-4d87-9aa7-64c80eb810cb-must-gather-output\") pod \"da586989-4b48-4d87-9aa7-64c80eb810cb\" (UID: \"da586989-4b48-4d87-9aa7-64c80eb810cb\") " Oct 13 14:09:37 crc kubenswrapper[4684]: I1013 14:09:37.020216 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rftk\" (UniqueName: \"kubernetes.io/projected/da586989-4b48-4d87-9aa7-64c80eb810cb-kube-api-access-4rftk\") pod \"da586989-4b48-4d87-9aa7-64c80eb810cb\" (UID: \"da586989-4b48-4d87-9aa7-64c80eb810cb\") " Oct 13 14:09:37 crc kubenswrapper[4684]: I1013 14:09:37.026226 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da586989-4b48-4d87-9aa7-64c80eb810cb-kube-api-access-4rftk" (OuterVolumeSpecName: "kube-api-access-4rftk") pod "da586989-4b48-4d87-9aa7-64c80eb810cb" (UID: "da586989-4b48-4d87-9aa7-64c80eb810cb"). InnerVolumeSpecName "kube-api-access-4rftk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 14:09:37 crc kubenswrapper[4684]: I1013 14:09:37.123694 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rftk\" (UniqueName: \"kubernetes.io/projected/da586989-4b48-4d87-9aa7-64c80eb810cb-kube-api-access-4rftk\") on node \"crc\" DevicePath \"\"" Oct 13 14:09:37 crc kubenswrapper[4684]: I1013 14:09:37.159798 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da586989-4b48-4d87-9aa7-64c80eb810cb-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "da586989-4b48-4d87-9aa7-64c80eb810cb" (UID: "da586989-4b48-4d87-9aa7-64c80eb810cb"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 14:09:37 crc kubenswrapper[4684]: I1013 14:09:37.224876 4684 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/da586989-4b48-4d87-9aa7-64c80eb810cb-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 13 14:09:37 crc kubenswrapper[4684]: I1013 14:09:37.842854 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nlmfj_must-gather-8lvtv_da586989-4b48-4d87-9aa7-64c80eb810cb/copy/0.log" Oct 13 14:09:37 crc kubenswrapper[4684]: I1013 14:09:37.843194 4684 scope.go:117] "RemoveContainer" containerID="7c658129b513d415745299380e5a6604a322379dc9db056d832905a39f9cd77f" Oct 13 14:09:37 crc kubenswrapper[4684]: I1013 14:09:37.843229 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nlmfj/must-gather-8lvtv" Oct 13 14:09:37 crc kubenswrapper[4684]: I1013 14:09:37.864368 4684 scope.go:117] "RemoveContainer" containerID="21ec255dded6e7df1d0398295d8e4327195faad60de6e1f22e7656d547aa4d82" Oct 13 14:09:38 crc kubenswrapper[4684]: I1013 14:09:38.385697 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da586989-4b48-4d87-9aa7-64c80eb810cb" path="/var/lib/kubelet/pods/da586989-4b48-4d87-9aa7-64c80eb810cb/volumes" Oct 13 14:09:39 crc kubenswrapper[4684]: I1013 14:09:39.351351 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:09:39 crc kubenswrapper[4684]: E1013 14:09:39.352250 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:09:50 crc kubenswrapper[4684]: I1013 14:09:50.350656 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:09:50 crc kubenswrapper[4684]: E1013 14:09:50.351454 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:10:02 crc kubenswrapper[4684]: I1013 14:10:02.352184 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:10:02 crc kubenswrapper[4684]: E1013 14:10:02.353207 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:10:16 crc kubenswrapper[4684]: I1013 14:10:16.360702 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:10:16 crc kubenswrapper[4684]: E1013 14:10:16.361580 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.146387 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-pklvl/must-gather-llxnv"] Oct 13 14:10:25 crc kubenswrapper[4684]: E1013 14:10:25.147438 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="252bb6df-1364-4a32-a606-85831bef3e2e" 
containerName="container-00" Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.147457 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="252bb6df-1364-4a32-a606-85831bef3e2e" containerName="container-00" Oct 13 14:10:25 crc kubenswrapper[4684]: E1013 14:10:25.147491 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da586989-4b48-4d87-9aa7-64c80eb810cb" containerName="gather" Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.147499 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="da586989-4b48-4d87-9aa7-64c80eb810cb" containerName="gather" Oct 13 14:10:25 crc kubenswrapper[4684]: E1013 14:10:25.147526 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da586989-4b48-4d87-9aa7-64c80eb810cb" containerName="copy" Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.147535 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="da586989-4b48-4d87-9aa7-64c80eb810cb" containerName="copy" Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.147802 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="da586989-4b48-4d87-9aa7-64c80eb810cb" containerName="gather" Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.147828 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="da586989-4b48-4d87-9aa7-64c80eb810cb" containerName="copy" Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.147844 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="252bb6df-1364-4a32-a606-85831bef3e2e" containerName="container-00" Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.151471 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pklvl/must-gather-llxnv" Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.153139 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-pklvl"/"openshift-service-ca.crt" Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.172143 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-pklvl"/"kube-root-ca.crt" Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.175020 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-pklvl/must-gather-llxnv"] Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.192426 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/06a28563-d823-4490-b3e2-af173ee1b16c-must-gather-output\") pod \"must-gather-llxnv\" (UID: \"06a28563-d823-4490-b3e2-af173ee1b16c\") " pod="openshift-must-gather-pklvl/must-gather-llxnv" Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.192472 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6dqd\" (UniqueName: \"kubernetes.io/projected/06a28563-d823-4490-b3e2-af173ee1b16c-kube-api-access-b6dqd\") pod \"must-gather-llxnv\" (UID: \"06a28563-d823-4490-b3e2-af173ee1b16c\") " pod="openshift-must-gather-pklvl/must-gather-llxnv" Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.293370 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/06a28563-d823-4490-b3e2-af173ee1b16c-must-gather-output\") pod \"must-gather-llxnv\" (UID: \"06a28563-d823-4490-b3e2-af173ee1b16c\") " pod="openshift-must-gather-pklvl/must-gather-llxnv" Oct 13 14:10:25 crc 
kubenswrapper[4684]: I1013 14:10:25.293425 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6dqd\" (UniqueName: \"kubernetes.io/projected/06a28563-d823-4490-b3e2-af173ee1b16c-kube-api-access-b6dqd\") pod \"must-gather-llxnv\" (UID: \"06a28563-d823-4490-b3e2-af173ee1b16c\") " pod="openshift-must-gather-pklvl/must-gather-llxnv" Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.294214 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/06a28563-d823-4490-b3e2-af173ee1b16c-must-gather-output\") pod \"must-gather-llxnv\" (UID: \"06a28563-d823-4490-b3e2-af173ee1b16c\") " pod="openshift-must-gather-pklvl/must-gather-llxnv" Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.332505 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6dqd\" (UniqueName: \"kubernetes.io/projected/06a28563-d823-4490-b3e2-af173ee1b16c-kube-api-access-b6dqd\") pod \"must-gather-llxnv\" (UID: \"06a28563-d823-4490-b3e2-af173ee1b16c\") " pod="openshift-must-gather-pklvl/must-gather-llxnv" Oct 13 14:10:25 crc kubenswrapper[4684]: I1013 14:10:25.473217 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pklvl/must-gather-llxnv" Oct 13 14:10:26 crc kubenswrapper[4684]: I1013 14:10:26.008236 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-pklvl/must-gather-llxnv"] Oct 13 14:10:26 crc kubenswrapper[4684]: I1013 14:10:26.295117 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pklvl/must-gather-llxnv" event={"ID":"06a28563-d823-4490-b3e2-af173ee1b16c","Type":"ContainerStarted","Data":"a3b18e5e9e999d75b463d5450af166c3e8a89d8c3594e0dc2825518c77fd2233"} Oct 13 14:10:26 crc kubenswrapper[4684]: I1013 14:10:26.295405 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pklvl/must-gather-llxnv" event={"ID":"06a28563-d823-4490-b3e2-af173ee1b16c","Type":"ContainerStarted","Data":"73279d646e251fc317f991bedcbdccb98caad83d42a635223e41baaf44e51fbe"} Oct 13 14:10:27 crc kubenswrapper[4684]: I1013 14:10:27.306285 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pklvl/must-gather-llxnv" event={"ID":"06a28563-d823-4490-b3e2-af173ee1b16c","Type":"ContainerStarted","Data":"f50adaa6fe093a8f0b7ade245e8e5276b58842012588455ee80305ba7b87a829"} Oct 13 14:10:27 crc kubenswrapper[4684]: I1013 14:10:27.326599 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-pklvl/must-gather-llxnv" podStartSLOduration=2.326581384 podStartE2EDuration="2.326581384s" podCreationTimestamp="2025-10-13 14:10:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 14:10:27.323874209 +0000 UTC m=+3781.891258279" watchObservedRunningTime="2025-10-13 14:10:27.326581384 +0000 UTC m=+3781.893965454" Oct 13 14:10:29 crc kubenswrapper[4684]: E1013 14:10:29.249399 4684 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.212:43852->38.102.83.212:42087: write tcp 38.102.83.212:43852->38.102.83.212:42087: write: broken pipe Oct 13 14:10:30 crc kubenswrapper[4684]: I1013 14:10:30.046469 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-pklvl/crc-debug-jgb2z"] Oct 13 14:10:30 crc kubenswrapper[4684]: I1013 14:10:30.048150 4684 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pklvl/crc-debug-jgb2z" Oct 13 14:10:30 crc kubenswrapper[4684]: I1013 14:10:30.050965 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-pklvl"/"default-dockercfg-drzbc" Oct 13 14:10:30 crc kubenswrapper[4684]: I1013 14:10:30.234243 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dd1f6b6c-a645-473a-95e0-7bf9c87aab01-host\") pod \"crc-debug-jgb2z\" (UID: \"dd1f6b6c-a645-473a-95e0-7bf9c87aab01\") " pod="openshift-must-gather-pklvl/crc-debug-jgb2z" Oct 13 14:10:30 crc kubenswrapper[4684]: I1013 14:10:30.234363 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff2rc\" (UniqueName: \"kubernetes.io/projected/dd1f6b6c-a645-473a-95e0-7bf9c87aab01-kube-api-access-ff2rc\") pod \"crc-debug-jgb2z\" (UID: \"dd1f6b6c-a645-473a-95e0-7bf9c87aab01\") " pod="openshift-must-gather-pklvl/crc-debug-jgb2z" Oct 13 14:10:30 crc kubenswrapper[4684]: I1013 14:10:30.337208 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff2rc\" (UniqueName: \"kubernetes.io/projected/dd1f6b6c-a645-473a-95e0-7bf9c87aab01-kube-api-access-ff2rc\") pod \"crc-debug-jgb2z\" (UID: \"dd1f6b6c-a645-473a-95e0-7bf9c87aab01\") " pod="openshift-must-gather-pklvl/crc-debug-jgb2z" Oct 13 14:10:30 crc kubenswrapper[4684]: I1013 14:10:30.338142 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dd1f6b6c-a645-473a-95e0-7bf9c87aab01-host\") pod \"crc-debug-jgb2z\" (UID: \"dd1f6b6c-a645-473a-95e0-7bf9c87aab01\") " pod="openshift-must-gather-pklvl/crc-debug-jgb2z" Oct 13 14:10:30 crc kubenswrapper[4684]: I1013 14:10:30.338334 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dd1f6b6c-a645-473a-95e0-7bf9c87aab01-host\") pod \"crc-debug-jgb2z\" (UID: \"dd1f6b6c-a645-473a-95e0-7bf9c87aab01\") " pod="openshift-must-gather-pklvl/crc-debug-jgb2z" Oct 13 14:10:30 crc kubenswrapper[4684]: I1013 14:10:30.356494 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:10:30 crc kubenswrapper[4684]: E1013 14:10:30.356935 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:10:30 crc kubenswrapper[4684]: I1013 14:10:30.367680 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff2rc\" (UniqueName: \"kubernetes.io/projected/dd1f6b6c-a645-473a-95e0-7bf9c87aab01-kube-api-access-ff2rc\") pod \"crc-debug-jgb2z\" (UID: \"dd1f6b6c-a645-473a-95e0-7bf9c87aab01\") " pod="openshift-must-gather-pklvl/crc-debug-jgb2z" Oct 13 14:10:30 crc kubenswrapper[4684]: I1013 14:10:30.666217 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pklvl/crc-debug-jgb2z" Oct 13 14:10:30 crc kubenswrapper[4684]: W1013 14:10:30.694343 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd1f6b6c_a645_473a_95e0_7bf9c87aab01.slice/crio-cb8b540d01fcf5a9ce978c786db01ad7c039ae172fc8c67f626ed3fce4da95ce WatchSource:0}: Error finding container cb8b540d01fcf5a9ce978c786db01ad7c039ae172fc8c67f626ed3fce4da95ce: Status 404 returned error can't find the container with id cb8b540d01fcf5a9ce978c786db01ad7c039ae172fc8c67f626ed3fce4da95ce Oct 13 14:10:31 crc kubenswrapper[4684]: I1013 14:10:31.342398 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pklvl/crc-debug-jgb2z" event={"ID":"dd1f6b6c-a645-473a-95e0-7bf9c87aab01","Type":"ContainerStarted","Data":"4ac2ba7d54159699efd15035f19d864545377d5c26f5c8f1d7d860efd45e9a4b"} Oct 13 14:10:31 crc kubenswrapper[4684]: I1013 14:10:31.342741 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pklvl/crc-debug-jgb2z" event={"ID":"dd1f6b6c-a645-473a-95e0-7bf9c87aab01","Type":"ContainerStarted","Data":"cb8b540d01fcf5a9ce978c786db01ad7c039ae172fc8c67f626ed3fce4da95ce"} Oct 13 14:10:31 crc kubenswrapper[4684]: I1013 14:10:31.355901 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-pklvl/crc-debug-jgb2z" podStartSLOduration=1.355878514 podStartE2EDuration="1.355878514s" podCreationTimestamp="2025-10-13 14:10:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 14:10:31.352831039 +0000 UTC m=+3785.920215109" watchObservedRunningTime="2025-10-13 14:10:31.355878514 +0000 UTC m=+3785.923262594" Oct 13 14:10:45 crc kubenswrapper[4684]: I1013 14:10:45.350678 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:10:45 crc kubenswrapper[4684]: E1013 14:10:45.351627 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:10:56 crc kubenswrapper[4684]: I1013 14:10:56.358703 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:10:56 crc kubenswrapper[4684]: E1013 14:10:56.359608 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" Oct 13 14:11:04 crc kubenswrapper[4684]: I1013 14:11:04.663871 4684 generic.go:334] "Generic (PLEG): container finished" podID="dd1f6b6c-a645-473a-95e0-7bf9c87aab01" containerID="4ac2ba7d54159699efd15035f19d864545377d5c26f5c8f1d7d860efd45e9a4b" exitCode=0 Oct 13 14:11:04 crc kubenswrapper[4684]: I1013 14:11:04.663952 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-must-gather-pklvl/crc-debug-jgb2z" event={"ID":"dd1f6b6c-a645-473a-95e0-7bf9c87aab01","Type":"ContainerDied","Data":"4ac2ba7d54159699efd15035f19d864545377d5c26f5c8f1d7d860efd45e9a4b"} Oct 13 14:11:05 crc kubenswrapper[4684]: I1013 14:11:05.789257 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pklvl/crc-debug-jgb2z" Oct 13 14:11:05 crc kubenswrapper[4684]: I1013 14:11:05.817518 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-pklvl/crc-debug-jgb2z"] Oct 13 14:11:05 crc kubenswrapper[4684]: I1013 14:11:05.829353 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-pklvl/crc-debug-jgb2z"] Oct 13 14:11:05 crc kubenswrapper[4684]: I1013 14:11:05.876526 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dd1f6b6c-a645-473a-95e0-7bf9c87aab01-host\") pod \"dd1f6b6c-a645-473a-95e0-7bf9c87aab01\" (UID: \"dd1f6b6c-a645-473a-95e0-7bf9c87aab01\") " Oct 13 14:11:05 crc kubenswrapper[4684]: I1013 14:11:05.876673 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dd1f6b6c-a645-473a-95e0-7bf9c87aab01-host" (OuterVolumeSpecName: "host") pod "dd1f6b6c-a645-473a-95e0-7bf9c87aab01" (UID: "dd1f6b6c-a645-473a-95e0-7bf9c87aab01"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 14:11:05 crc kubenswrapper[4684]: I1013 14:11:05.876805 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ff2rc\" (UniqueName: \"kubernetes.io/projected/dd1f6b6c-a645-473a-95e0-7bf9c87aab01-kube-api-access-ff2rc\") pod \"dd1f6b6c-a645-473a-95e0-7bf9c87aab01\" (UID: \"dd1f6b6c-a645-473a-95e0-7bf9c87aab01\") " Oct 13 14:11:05 crc kubenswrapper[4684]: I1013 14:11:05.877291 4684 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dd1f6b6c-a645-473a-95e0-7bf9c87aab01-host\") on node \"crc\" DevicePath \"\"" Oct 13 14:11:05 crc kubenswrapper[4684]: I1013 14:11:05.888813 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd1f6b6c-a645-473a-95e0-7bf9c87aab01-kube-api-access-ff2rc" (OuterVolumeSpecName: "kube-api-access-ff2rc") pod "dd1f6b6c-a645-473a-95e0-7bf9c87aab01" (UID: "dd1f6b6c-a645-473a-95e0-7bf9c87aab01"). InnerVolumeSpecName "kube-api-access-ff2rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 14:11:05 crc kubenswrapper[4684]: I1013 14:11:05.979459 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ff2rc\" (UniqueName: \"kubernetes.io/projected/dd1f6b6c-a645-473a-95e0-7bf9c87aab01-kube-api-access-ff2rc\") on node \"crc\" DevicePath \"\"" Oct 13 14:11:06 crc kubenswrapper[4684]: I1013 14:11:06.368415 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd1f6b6c-a645-473a-95e0-7bf9c87aab01" path="/var/lib/kubelet/pods/dd1f6b6c-a645-473a-95e0-7bf9c87aab01/volumes" Oct 13 14:11:06 crc kubenswrapper[4684]: I1013 14:11:06.681818 4684 scope.go:117] "RemoveContainer" containerID="4ac2ba7d54159699efd15035f19d864545377d5c26f5c8f1d7d860efd45e9a4b" Oct 13 14:11:06 crc kubenswrapper[4684]: I1013 14:11:06.681863 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-pklvl/crc-debug-jgb2z" Oct 13 14:11:06 crc kubenswrapper[4684]: I1013 14:11:06.997927 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-pklvl/crc-debug-6v492"] Oct 13 14:11:06 crc kubenswrapper[4684]: E1013 14:11:06.998315 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd1f6b6c-a645-473a-95e0-7bf9c87aab01" containerName="container-00" Oct 13 14:11:06 crc kubenswrapper[4684]: I1013 14:11:06.998329 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd1f6b6c-a645-473a-95e0-7bf9c87aab01" containerName="container-00" Oct 13 14:11:06 crc kubenswrapper[4684]: I1013 14:11:06.998516 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd1f6b6c-a645-473a-95e0-7bf9c87aab01" containerName="container-00" Oct 13 14:11:06 crc kubenswrapper[4684]: I1013 14:11:06.999793 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pklvl/crc-debug-6v492" Oct 13 14:11:07 crc kubenswrapper[4684]: I1013 14:11:07.001530 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-pklvl"/"default-dockercfg-drzbc" Oct 13 14:11:07 crc kubenswrapper[4684]: I1013 14:11:07.098596 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhr7l\" (UniqueName: \"kubernetes.io/projected/7a417a31-16ab-41ac-9407-78cb152adf43-kube-api-access-zhr7l\") pod \"crc-debug-6v492\" (UID: \"7a417a31-16ab-41ac-9407-78cb152adf43\") " pod="openshift-must-gather-pklvl/crc-debug-6v492" Oct 13 14:11:07 crc kubenswrapper[4684]: I1013 14:11:07.099105 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a417a31-16ab-41ac-9407-78cb152adf43-host\") pod \"crc-debug-6v492\" (UID: \"7a417a31-16ab-41ac-9407-78cb152adf43\") " pod="openshift-must-gather-pklvl/crc-debug-6v492" Oct 13 14:11:07 crc kubenswrapper[4684]: I1013 14:11:07.200672 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a417a31-16ab-41ac-9407-78cb152adf43-host\") pod \"crc-debug-6v492\" (UID: \"7a417a31-16ab-41ac-9407-78cb152adf43\") " pod="openshift-must-gather-pklvl/crc-debug-6v492" Oct 13 14:11:07 crc kubenswrapper[4684]: I1013 14:11:07.200809 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhr7l\" (UniqueName: \"kubernetes.io/projected/7a417a31-16ab-41ac-9407-78cb152adf43-kube-api-access-zhr7l\") pod \"crc-debug-6v492\" (UID: \"7a417a31-16ab-41ac-9407-78cb152adf43\") " pod="openshift-must-gather-pklvl/crc-debug-6v492" Oct 13 14:11:07 crc kubenswrapper[4684]: I1013 14:11:07.201222 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a417a31-16ab-41ac-9407-78cb152adf43-host\") pod \"crc-debug-6v492\" (UID: \"7a417a31-16ab-41ac-9407-78cb152adf43\") " pod="openshift-must-gather-pklvl/crc-debug-6v492" Oct 13 14:11:07 crc kubenswrapper[4684]: I1013 14:11:07.219617 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhr7l\" (UniqueName: \"kubernetes.io/projected/7a417a31-16ab-41ac-9407-78cb152adf43-kube-api-access-zhr7l\") pod \"crc-debug-6v492\" (UID: \"7a417a31-16ab-41ac-9407-78cb152adf43\") " pod="openshift-must-gather-pklvl/crc-debug-6v492" Oct 13 14:11:07 crc kubenswrapper[4684]: I1013 
14:11:07.319384 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pklvl/crc-debug-6v492"
Oct 13 14:11:07 crc kubenswrapper[4684]: I1013 14:11:07.690249 4684 generic.go:334] "Generic (PLEG): container finished" podID="7a417a31-16ab-41ac-9407-78cb152adf43" containerID="c1f0b74090c1442b2eb987e59f83d8cd89fa47e0f8076aab5b092f357951fdcd" exitCode=0
Oct 13 14:11:07 crc kubenswrapper[4684]: I1013 14:11:07.690337 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pklvl/crc-debug-6v492" event={"ID":"7a417a31-16ab-41ac-9407-78cb152adf43","Type":"ContainerDied","Data":"c1f0b74090c1442b2eb987e59f83d8cd89fa47e0f8076aab5b092f357951fdcd"}
Oct 13 14:11:07 crc kubenswrapper[4684]: I1013 14:11:07.690551 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pklvl/crc-debug-6v492" event={"ID":"7a417a31-16ab-41ac-9407-78cb152adf43","Type":"ContainerStarted","Data":"8db2ab53f6846232045cdf59811ba24d249dde32b042f1b5e2eb8bdb56d82c7a"}
Oct 13 14:11:08 crc kubenswrapper[4684]: I1013 14:11:08.120822 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-pklvl/crc-debug-6v492"]
Oct 13 14:11:08 crc kubenswrapper[4684]: I1013 14:11:08.128285 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-pklvl/crc-debug-6v492"]
Oct 13 14:11:08 crc kubenswrapper[4684]: I1013 14:11:08.813752 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pklvl/crc-debug-6v492"
Oct 13 14:11:08 crc kubenswrapper[4684]: I1013 14:11:08.928448 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhr7l\" (UniqueName: \"kubernetes.io/projected/7a417a31-16ab-41ac-9407-78cb152adf43-kube-api-access-zhr7l\") pod \"7a417a31-16ab-41ac-9407-78cb152adf43\" (UID: \"7a417a31-16ab-41ac-9407-78cb152adf43\") "
Oct 13 14:11:08 crc kubenswrapper[4684]: I1013 14:11:08.928834 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a417a31-16ab-41ac-9407-78cb152adf43-host\") pod \"7a417a31-16ab-41ac-9407-78cb152adf43\" (UID: \"7a417a31-16ab-41ac-9407-78cb152adf43\") "
Oct 13 14:11:08 crc kubenswrapper[4684]: I1013 14:11:08.928959 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7a417a31-16ab-41ac-9407-78cb152adf43-host" (OuterVolumeSpecName: "host") pod "7a417a31-16ab-41ac-9407-78cb152adf43" (UID: "7a417a31-16ab-41ac-9407-78cb152adf43"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 13 14:11:08 crc kubenswrapper[4684]: I1013 14:11:08.929592 4684 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a417a31-16ab-41ac-9407-78cb152adf43-host\") on node \"crc\" DevicePath \"\""
Oct 13 14:11:08 crc kubenswrapper[4684]: I1013 14:11:08.935140 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a417a31-16ab-41ac-9407-78cb152adf43-kube-api-access-zhr7l" (OuterVolumeSpecName: "kube-api-access-zhr7l") pod "7a417a31-16ab-41ac-9407-78cb152adf43" (UID: "7a417a31-16ab-41ac-9407-78cb152adf43"). InnerVolumeSpecName "kube-api-access-zhr7l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 14:11:09 crc kubenswrapper[4684]: I1013 14:11:09.031961 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhr7l\" (UniqueName: \"kubernetes.io/projected/7a417a31-16ab-41ac-9407-78cb152adf43-kube-api-access-zhr7l\") on node \"crc\" DevicePath \"\""
Oct 13 14:11:09 crc kubenswrapper[4684]: I1013 14:11:09.308843 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-pklvl/crc-debug-q76n2"]
Oct 13 14:11:09 crc kubenswrapper[4684]: E1013 14:11:09.309238 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a417a31-16ab-41ac-9407-78cb152adf43" containerName="container-00"
Oct 13 14:11:09 crc kubenswrapper[4684]: I1013 14:11:09.309250 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a417a31-16ab-41ac-9407-78cb152adf43" containerName="container-00"
Oct 13 14:11:09 crc kubenswrapper[4684]: I1013 14:11:09.309464 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a417a31-16ab-41ac-9407-78cb152adf43" containerName="container-00"
Oct 13 14:11:09 crc kubenswrapper[4684]: I1013 14:11:09.310070 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pklvl/crc-debug-q76n2"
Oct 13 14:11:09 crc kubenswrapper[4684]: I1013 14:11:09.337783 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57fbb\" (UniqueName: \"kubernetes.io/projected/c2ab3d63-d71f-4ed3-9d03-486eb665312d-kube-api-access-57fbb\") pod \"crc-debug-q76n2\" (UID: \"c2ab3d63-d71f-4ed3-9d03-486eb665312d\") " pod="openshift-must-gather-pklvl/crc-debug-q76n2"
Oct 13 14:11:09 crc kubenswrapper[4684]: I1013 14:11:09.337888 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c2ab3d63-d71f-4ed3-9d03-486eb665312d-host\") pod \"crc-debug-q76n2\" (UID: \"c2ab3d63-d71f-4ed3-9d03-486eb665312d\") " pod="openshift-must-gather-pklvl/crc-debug-q76n2"
Oct 13 14:11:09 crc kubenswrapper[4684]: I1013 14:11:09.440232 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c2ab3d63-d71f-4ed3-9d03-486eb665312d-host\") pod \"crc-debug-q76n2\" (UID: \"c2ab3d63-d71f-4ed3-9d03-486eb665312d\") " pod="openshift-must-gather-pklvl/crc-debug-q76n2"
Oct 13 14:11:09 crc kubenswrapper[4684]: I1013 14:11:09.440374 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c2ab3d63-d71f-4ed3-9d03-486eb665312d-host\") pod \"crc-debug-q76n2\" (UID: \"c2ab3d63-d71f-4ed3-9d03-486eb665312d\") " pod="openshift-must-gather-pklvl/crc-debug-q76n2"
Oct 13 14:11:09 crc kubenswrapper[4684]: I1013 14:11:09.440475 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57fbb\" (UniqueName: \"kubernetes.io/projected/c2ab3d63-d71f-4ed3-9d03-486eb665312d-kube-api-access-57fbb\") pod \"crc-debug-q76n2\" (UID: \"c2ab3d63-d71f-4ed3-9d03-486eb665312d\") " pod="openshift-must-gather-pklvl/crc-debug-q76n2"
Oct 13 14:11:09 crc kubenswrapper[4684]: I1013 14:11:09.467629 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57fbb\" (UniqueName: \"kubernetes.io/projected/c2ab3d63-d71f-4ed3-9d03-486eb665312d-kube-api-access-57fbb\") pod \"crc-debug-q76n2\" (UID: \"c2ab3d63-d71f-4ed3-9d03-486eb665312d\") " pod="openshift-must-gather-pklvl/crc-debug-q76n2"
Oct 13 14:11:09 crc kubenswrapper[4684]: I1013 14:11:09.628662 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pklvl/crc-debug-q76n2"
Oct 13 14:11:09 crc kubenswrapper[4684]: W1013 14:11:09.655780 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2ab3d63_d71f_4ed3_9d03_486eb665312d.slice/crio-36335243672f028315ab6e4626f34b26248ee4e86b938da1d6a27dbe924e96ae WatchSource:0}: Error finding container 36335243672f028315ab6e4626f34b26248ee4e86b938da1d6a27dbe924e96ae: Status 404 returned error can't find the container with id 36335243672f028315ab6e4626f34b26248ee4e86b938da1d6a27dbe924e96ae
Oct 13 14:11:09 crc kubenswrapper[4684]: I1013 14:11:09.707553 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pklvl/crc-debug-6v492"
Oct 13 14:11:09 crc kubenswrapper[4684]: I1013 14:11:09.710970 4684 scope.go:117] "RemoveContainer" containerID="c1f0b74090c1442b2eb987e59f83d8cd89fa47e0f8076aab5b092f357951fdcd"
Oct 13 14:11:09 crc kubenswrapper[4684]: I1013 14:11:09.712262 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pklvl/crc-debug-q76n2" event={"ID":"c2ab3d63-d71f-4ed3-9d03-486eb665312d","Type":"ContainerStarted","Data":"36335243672f028315ab6e4626f34b26248ee4e86b938da1d6a27dbe924e96ae"}
Oct 13 14:11:10 crc kubenswrapper[4684]: I1013 14:11:10.364169 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a417a31-16ab-41ac-9407-78cb152adf43" path="/var/lib/kubelet/pods/7a417a31-16ab-41ac-9407-78cb152adf43/volumes"
Oct 13 14:11:10 crc kubenswrapper[4684]: I1013 14:11:10.722977 4684 generic.go:334] "Generic (PLEG): container finished" podID="c2ab3d63-d71f-4ed3-9d03-486eb665312d" containerID="ca64b6a34c51cc2a94911e742adb766e7b25b4070ec4d86bb667bfa84758c5b3" exitCode=0
Oct 13 14:11:10 crc kubenswrapper[4684]: I1013 14:11:10.723092 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pklvl/crc-debug-q76n2" event={"ID":"c2ab3d63-d71f-4ed3-9d03-486eb665312d","Type":"ContainerDied","Data":"ca64b6a34c51cc2a94911e742adb766e7b25b4070ec4d86bb667bfa84758c5b3"}
Oct 13 14:11:10 crc kubenswrapper[4684]: I1013 14:11:10.762184 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-pklvl/crc-debug-q76n2"]
Oct 13 14:11:10 crc kubenswrapper[4684]: I1013 14:11:10.770694 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-pklvl/crc-debug-q76n2"]
Oct 13 14:11:10 crc kubenswrapper[4684]: I1013 14:11:10.947238 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wd96l"]
Oct 13 14:11:10 crc kubenswrapper[4684]: E1013 14:11:10.948209 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2ab3d63-d71f-4ed3-9d03-486eb665312d" containerName="container-00"
Oct 13 14:11:10 crc kubenswrapper[4684]: I1013 14:11:10.948225 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2ab3d63-d71f-4ed3-9d03-486eb665312d" containerName="container-00"
Oct 13 14:11:10 crc kubenswrapper[4684]: I1013 14:11:10.948670 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2ab3d63-d71f-4ed3-9d03-486eb665312d" containerName="container-00"
Oct 13 14:11:10 crc kubenswrapper[4684]: I1013 14:11:10.954459 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:10 crc kubenswrapper[4684]: I1013 14:11:10.987802 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wd96l"]
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.074666 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1afefc8-4d59-4463-8943-a18f3c141455-catalog-content\") pod \"certified-operators-wd96l\" (UID: \"f1afefc8-4d59-4463-8943-a18f3c141455\") " pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.074720 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1afefc8-4d59-4463-8943-a18f3c141455-utilities\") pod \"certified-operators-wd96l\" (UID: \"f1afefc8-4d59-4463-8943-a18f3c141455\") " pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.074769 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dr25w\" (UniqueName: \"kubernetes.io/projected/f1afefc8-4d59-4463-8943-a18f3c141455-kube-api-access-dr25w\") pod \"certified-operators-wd96l\" (UID: \"f1afefc8-4d59-4463-8943-a18f3c141455\") " pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.176987 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1afefc8-4d59-4463-8943-a18f3c141455-catalog-content\") pod \"certified-operators-wd96l\" (UID: \"f1afefc8-4d59-4463-8943-a18f3c141455\") " pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.177329 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1afefc8-4d59-4463-8943-a18f3c141455-utilities\") pod \"certified-operators-wd96l\" (UID: \"f1afefc8-4d59-4463-8943-a18f3c141455\") " pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.177372 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dr25w\" (UniqueName: \"kubernetes.io/projected/f1afefc8-4d59-4463-8943-a18f3c141455-kube-api-access-dr25w\") pod \"certified-operators-wd96l\" (UID: \"f1afefc8-4d59-4463-8943-a18f3c141455\") " pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.177659 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1afefc8-4d59-4463-8943-a18f3c141455-catalog-content\") pod \"certified-operators-wd96l\" (UID: \"f1afefc8-4d59-4463-8943-a18f3c141455\") " pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.177972 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1afefc8-4d59-4463-8943-a18f3c141455-utilities\") pod \"certified-operators-wd96l\" (UID: \"f1afefc8-4d59-4463-8943-a18f3c141455\") " pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.197149 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dr25w\" (UniqueName: \"kubernetes.io/projected/f1afefc8-4d59-4463-8943-a18f3c141455-kube-api-access-dr25w\") pod \"certified-operators-wd96l\" (UID: \"f1afefc8-4d59-4463-8943-a18f3c141455\") " pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.276715 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.350653 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7"
Oct 13 14:11:11 crc kubenswrapper[4684]: E1013 14:11:11.350888 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.793566 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wd96l"]
Oct 13 14:11:11 crc kubenswrapper[4684]: W1013 14:11:11.802183 4684 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1afefc8_4d59_4463_8943_a18f3c141455.slice/crio-b6852648b353efb3e5619316debf1a8dbfea3ae140c9ff20034e3c241a4a0d23 WatchSource:0}: Error finding container b6852648b353efb3e5619316debf1a8dbfea3ae140c9ff20034e3c241a4a0d23: Status 404 returned error can't find the container with id b6852648b353efb3e5619316debf1a8dbfea3ae140c9ff20034e3c241a4a0d23
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.875694 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pklvl/crc-debug-q76n2"
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.989289 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c2ab3d63-d71f-4ed3-9d03-486eb665312d-host\") pod \"c2ab3d63-d71f-4ed3-9d03-486eb665312d\" (UID: \"c2ab3d63-d71f-4ed3-9d03-486eb665312d\") "
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.989416 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c2ab3d63-d71f-4ed3-9d03-486eb665312d-host" (OuterVolumeSpecName: "host") pod "c2ab3d63-d71f-4ed3-9d03-486eb665312d" (UID: "c2ab3d63-d71f-4ed3-9d03-486eb665312d"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.989535 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57fbb\" (UniqueName: \"kubernetes.io/projected/c2ab3d63-d71f-4ed3-9d03-486eb665312d-kube-api-access-57fbb\") pod \"c2ab3d63-d71f-4ed3-9d03-486eb665312d\" (UID: \"c2ab3d63-d71f-4ed3-9d03-486eb665312d\") "
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.990080 4684 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c2ab3d63-d71f-4ed3-9d03-486eb665312d-host\") on node \"crc\" DevicePath \"\""
Oct 13 14:11:11 crc kubenswrapper[4684]: I1013 14:11:11.997221 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2ab3d63-d71f-4ed3-9d03-486eb665312d-kube-api-access-57fbb" (OuterVolumeSpecName: "kube-api-access-57fbb") pod "c2ab3d63-d71f-4ed3-9d03-486eb665312d" (UID: "c2ab3d63-d71f-4ed3-9d03-486eb665312d"). InnerVolumeSpecName "kube-api-access-57fbb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 14:11:12 crc kubenswrapper[4684]: I1013 14:11:12.092170 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57fbb\" (UniqueName: \"kubernetes.io/projected/c2ab3d63-d71f-4ed3-9d03-486eb665312d-kube-api-access-57fbb\") on node \"crc\" DevicePath \"\""
Oct 13 14:11:12 crc kubenswrapper[4684]: I1013 14:11:12.361475 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2ab3d63-d71f-4ed3-9d03-486eb665312d" path="/var/lib/kubelet/pods/c2ab3d63-d71f-4ed3-9d03-486eb665312d/volumes"
Oct 13 14:11:12 crc kubenswrapper[4684]: I1013 14:11:12.743123 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pklvl/crc-debug-q76n2"
Oct 13 14:11:12 crc kubenswrapper[4684]: I1013 14:11:12.743214 4684 scope.go:117] "RemoveContainer" containerID="ca64b6a34c51cc2a94911e742adb766e7b25b4070ec4d86bb667bfa84758c5b3"
Oct 13 14:11:12 crc kubenswrapper[4684]: I1013 14:11:12.745148 4684 generic.go:334] "Generic (PLEG): container finished" podID="f1afefc8-4d59-4463-8943-a18f3c141455" containerID="84869b0d4404f2f7b29baad24b68b14955d75180ffe45cf072b2469f16eff31a" exitCode=0
Oct 13 14:11:12 crc kubenswrapper[4684]: I1013 14:11:12.745192 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wd96l" event={"ID":"f1afefc8-4d59-4463-8943-a18f3c141455","Type":"ContainerDied","Data":"84869b0d4404f2f7b29baad24b68b14955d75180ffe45cf072b2469f16eff31a"}
Oct 13 14:11:12 crc kubenswrapper[4684]: I1013 14:11:12.745223 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wd96l" event={"ID":"f1afefc8-4d59-4463-8943-a18f3c141455","Type":"ContainerStarted","Data":"b6852648b353efb3e5619316debf1a8dbfea3ae140c9ff20034e3c241a4a0d23"}
Oct 13 14:11:12 crc kubenswrapper[4684]: I1013 14:11:12.747151 4684 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 13 14:11:13 crc kubenswrapper[4684]: I1013 14:11:13.756335 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wd96l" event={"ID":"f1afefc8-4d59-4463-8943-a18f3c141455","Type":"ContainerStarted","Data":"101fd0b4c5de7847323bca79174ff7f279d49acb839cb4c302fce26b6ae16b58"}
Oct 13 14:11:14 crc kubenswrapper[4684]: I1013 14:11:14.765583 4684 generic.go:334] "Generic (PLEG): container finished" podID="f1afefc8-4d59-4463-8943-a18f3c141455" containerID="101fd0b4c5de7847323bca79174ff7f279d49acb839cb4c302fce26b6ae16b58" exitCode=0
Oct 13 14:11:14 crc kubenswrapper[4684]: I1013 14:11:14.765808 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wd96l" event={"ID":"f1afefc8-4d59-4463-8943-a18f3c141455","Type":"ContainerDied","Data":"101fd0b4c5de7847323bca79174ff7f279d49acb839cb4c302fce26b6ae16b58"}
Oct 13 14:11:15 crc kubenswrapper[4684]: I1013 14:11:15.777394 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wd96l" event={"ID":"f1afefc8-4d59-4463-8943-a18f3c141455","Type":"ContainerStarted","Data":"5f2f57c2e69eb259990638d2b0b723ef97a2451d437763e51f32ad7d11ca552c"}
Oct 13 14:11:15 crc kubenswrapper[4684]: I1013 14:11:15.798098 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wd96l" podStartSLOduration=3.337129612 podStartE2EDuration="5.798081069s" podCreationTimestamp="2025-10-13 14:11:10 +0000 UTC" firstStartedPulling="2025-10-13 14:11:12.746922048 +0000 UTC m=+3827.314306118" lastFinishedPulling="2025-10-13 14:11:15.207873505 +0000 UTC m=+3829.775257575" observedRunningTime="2025-10-13 14:11:15.796358494 +0000 UTC m=+3830.363742594" watchObservedRunningTime="2025-10-13 14:11:15.798081069 +0000 UTC m=+3830.365465139"
Oct 13 14:11:21 crc kubenswrapper[4684]: I1013 14:11:21.277198 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:21 crc kubenswrapper[4684]: I1013 14:11:21.277934 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:21 crc kubenswrapper[4684]: I1013 14:11:21.327801 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:21 crc kubenswrapper[4684]: I1013 14:11:21.905629 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:21 crc kubenswrapper[4684]: I1013 14:11:21.954070 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wd96l"]
Oct 13 14:11:23 crc kubenswrapper[4684]: I1013 14:11:23.848173 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wd96l" podUID="f1afefc8-4d59-4463-8943-a18f3c141455" containerName="registry-server" containerID="cri-o://5f2f57c2e69eb259990638d2b0b723ef97a2451d437763e51f32ad7d11ca552c" gracePeriod=2
Oct 13 14:11:23 crc kubenswrapper[4684]: E1013 14:11:23.898504 4684 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1afefc8_4d59_4463_8943_a18f3c141455.slice/crio-5f2f57c2e69eb259990638d2b0b723ef97a2451d437763e51f32ad7d11ca552c.scope\": RecentStats: unable to find data in memory cache]"
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.398262 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.435342 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1afefc8-4d59-4463-8943-a18f3c141455-catalog-content\") pod \"f1afefc8-4d59-4463-8943-a18f3c141455\" (UID: \"f1afefc8-4d59-4463-8943-a18f3c141455\") "
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.435472 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1afefc8-4d59-4463-8943-a18f3c141455-utilities\") pod \"f1afefc8-4d59-4463-8943-a18f3c141455\" (UID: \"f1afefc8-4d59-4463-8943-a18f3c141455\") "
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.435622 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dr25w\" (UniqueName: \"kubernetes.io/projected/f1afefc8-4d59-4463-8943-a18f3c141455-kube-api-access-dr25w\") pod \"f1afefc8-4d59-4463-8943-a18f3c141455\" (UID: \"f1afefc8-4d59-4463-8943-a18f3c141455\") "
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.438348 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1afefc8-4d59-4463-8943-a18f3c141455-utilities" (OuterVolumeSpecName: "utilities") pod "f1afefc8-4d59-4463-8943-a18f3c141455" (UID: "f1afefc8-4d59-4463-8943-a18f3c141455"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.443548 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1afefc8-4d59-4463-8943-a18f3c141455-kube-api-access-dr25w" (OuterVolumeSpecName: "kube-api-access-dr25w") pod "f1afefc8-4d59-4463-8943-a18f3c141455" (UID: "f1afefc8-4d59-4463-8943-a18f3c141455"). InnerVolumeSpecName "kube-api-access-dr25w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.478646 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1afefc8-4d59-4463-8943-a18f3c141455-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f1afefc8-4d59-4463-8943-a18f3c141455" (UID: "f1afefc8-4d59-4463-8943-a18f3c141455"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.537775 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1afefc8-4d59-4463-8943-a18f3c141455-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.537809 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1afefc8-4d59-4463-8943-a18f3c141455-utilities\") on node \"crc\" DevicePath \"\""
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.537823 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dr25w\" (UniqueName: \"kubernetes.io/projected/f1afefc8-4d59-4463-8943-a18f3c141455-kube-api-access-dr25w\") on node \"crc\" DevicePath \"\""
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.861045 4684 generic.go:334] "Generic (PLEG): container finished" podID="f1afefc8-4d59-4463-8943-a18f3c141455" containerID="5f2f57c2e69eb259990638d2b0b723ef97a2451d437763e51f32ad7d11ca552c" exitCode=0
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.861093 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wd96l" event={"ID":"f1afefc8-4d59-4463-8943-a18f3c141455","Type":"ContainerDied","Data":"5f2f57c2e69eb259990638d2b0b723ef97a2451d437763e51f32ad7d11ca552c"}
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.861126 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wd96l" event={"ID":"f1afefc8-4d59-4463-8943-a18f3c141455","Type":"ContainerDied","Data":"b6852648b353efb3e5619316debf1a8dbfea3ae140c9ff20034e3c241a4a0d23"}
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.861145 4684 scope.go:117] "RemoveContainer" containerID="5f2f57c2e69eb259990638d2b0b723ef97a2451d437763e51f32ad7d11ca552c"
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.861140 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wd96l"
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.896822 4684 scope.go:117] "RemoveContainer" containerID="101fd0b4c5de7847323bca79174ff7f279d49acb839cb4c302fce26b6ae16b58"
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.924806 4684 scope.go:117] "RemoveContainer" containerID="84869b0d4404f2f7b29baad24b68b14955d75180ffe45cf072b2469f16eff31a"
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.924954 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wd96l"]
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.938559 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wd96l"]
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.995849 4684 scope.go:117] "RemoveContainer" containerID="5f2f57c2e69eb259990638d2b0b723ef97a2451d437763e51f32ad7d11ca552c"
Oct 13 14:11:24 crc kubenswrapper[4684]: E1013 14:11:24.996317 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f2f57c2e69eb259990638d2b0b723ef97a2451d437763e51f32ad7d11ca552c\": container with ID starting with 5f2f57c2e69eb259990638d2b0b723ef97a2451d437763e51f32ad7d11ca552c not found: ID does not exist" containerID="5f2f57c2e69eb259990638d2b0b723ef97a2451d437763e51f32ad7d11ca552c"
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.996362 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f2f57c2e69eb259990638d2b0b723ef97a2451d437763e51f32ad7d11ca552c"} err="failed to get container status \"5f2f57c2e69eb259990638d2b0b723ef97a2451d437763e51f32ad7d11ca552c\": rpc error: code = NotFound desc = could not find container \"5f2f57c2e69eb259990638d2b0b723ef97a2451d437763e51f32ad7d11ca552c\": container with ID starting with 5f2f57c2e69eb259990638d2b0b723ef97a2451d437763e51f32ad7d11ca552c not found: ID does not exist"
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.996388 4684 scope.go:117] "RemoveContainer" containerID="101fd0b4c5de7847323bca79174ff7f279d49acb839cb4c302fce26b6ae16b58"
Oct 13 14:11:24 crc kubenswrapper[4684]: E1013 14:11:24.996898 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"101fd0b4c5de7847323bca79174ff7f279d49acb839cb4c302fce26b6ae16b58\": container with ID starting with 101fd0b4c5de7847323bca79174ff7f279d49acb839cb4c302fce26b6ae16b58 not found: ID does not exist" containerID="101fd0b4c5de7847323bca79174ff7f279d49acb839cb4c302fce26b6ae16b58"
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.996950 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"101fd0b4c5de7847323bca79174ff7f279d49acb839cb4c302fce26b6ae16b58"} err="failed to get container status \"101fd0b4c5de7847323bca79174ff7f279d49acb839cb4c302fce26b6ae16b58\": rpc error: code = NotFound desc = could not find container \"101fd0b4c5de7847323bca79174ff7f279d49acb839cb4c302fce26b6ae16b58\": container with ID starting with 101fd0b4c5de7847323bca79174ff7f279d49acb839cb4c302fce26b6ae16b58 not found: ID does not exist"
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.996977 4684 scope.go:117] "RemoveContainer" containerID="84869b0d4404f2f7b29baad24b68b14955d75180ffe45cf072b2469f16eff31a"
Oct 13 14:11:24 crc kubenswrapper[4684]: E1013 14:11:24.997272 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84869b0d4404f2f7b29baad24b68b14955d75180ffe45cf072b2469f16eff31a\": container with ID starting with 84869b0d4404f2f7b29baad24b68b14955d75180ffe45cf072b2469f16eff31a not found: ID does not exist" containerID="84869b0d4404f2f7b29baad24b68b14955d75180ffe45cf072b2469f16eff31a"
Oct 13 14:11:24 crc kubenswrapper[4684]: I1013 14:11:24.997305 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84869b0d4404f2f7b29baad24b68b14955d75180ffe45cf072b2469f16eff31a"} err="failed to get container status \"84869b0d4404f2f7b29baad24b68b14955d75180ffe45cf072b2469f16eff31a\": rpc error: code = NotFound desc = could not find container \"84869b0d4404f2f7b29baad24b68b14955d75180ffe45cf072b2469f16eff31a\": container with ID starting with 84869b0d4404f2f7b29baad24b68b14955d75180ffe45cf072b2469f16eff31a not found: ID does not exist"
Oct 13 14:11:25 crc kubenswrapper[4684]: I1013 14:11:25.355412 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6c8dbf9d98-ml255_cafbe5b0-5ce7-4f2b-ac20-4f95592dc662/barbican-api/0.log"
Oct 13 14:11:25 crc kubenswrapper[4684]: I1013 14:11:25.580527 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6c8dbf9d98-ml255_cafbe5b0-5ce7-4f2b-ac20-4f95592dc662/barbican-api-log/0.log"
Oct 13 14:11:25 crc kubenswrapper[4684]: I1013 14:11:25.683092 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-746497c59d-h7fpp_b3b2549e-a0de-4650-95fb-c3b8c8998664/barbican-keystone-listener/0.log"
Oct 13 14:11:25 crc kubenswrapper[4684]: I1013 14:11:25.733177 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-746497c59d-h7fpp_b3b2549e-a0de-4650-95fb-c3b8c8998664/barbican-keystone-listener-log/0.log"
Oct 13 14:11:25 crc kubenswrapper[4684]: I1013 14:11:25.861115 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6c9b968d8f-c6bhw_290ff522-789b-4ba3-90d4-2047bf14a6de/barbican-worker/0.log"
Oct 13 14:11:25 crc kubenswrapper[4684]: I1013 14:11:25.938161 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6c9b968d8f-c6bhw_290ff522-789b-4ba3-90d4-2047bf14a6de/barbican-worker-log/0.log"
Oct 13 14:11:26 crc kubenswrapper[4684]: I1013 14:11:26.035343 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-kcjsm_4bf002ba-ce9a-40ad-a860-0572fc61d996/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:26 crc kubenswrapper[4684]: I1013 14:11:26.179836 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6/ceilometer-notification-agent/0.log"
Oct 13 14:11:26 crc kubenswrapper[4684]: I1013 14:11:26.221998 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6/ceilometer-central-agent/0.log"
Oct 13 14:11:26 crc kubenswrapper[4684]: I1013 14:11:26.243656 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6/proxy-httpd/0.log"
Oct 13 14:11:26 crc kubenswrapper[4684]: I1013 14:11:26.311913 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ca38eab6-0be1-4f5c-b101-d52cd9bf8fb6/sg-core/0.log"
Oct 13 14:11:26 crc kubenswrapper[4684]: I1013 14:11:26.356068 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7"
Oct 13 14:11:26 crc kubenswrapper[4684]: E1013 14:11:26.356346 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 14:11:26 crc kubenswrapper[4684]: I1013 14:11:26.366371 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1afefc8-4d59-4463-8943-a18f3c141455" path="/var/lib/kubelet/pods/f1afefc8-4d59-4463-8943-a18f3c141455/volumes"
Oct 13 14:11:26 crc kubenswrapper[4684]: I1013 14:11:26.499802 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e/cinder-api-log/0.log"
Oct 13 14:11:26 crc kubenswrapper[4684]: I1013 14:11:26.542506 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_4873cf0b-4aca-4d5a-a7d0-4cbc8ceb1b1e/cinder-api/0.log"
Oct 13 14:11:26 crc kubenswrapper[4684]: I1013 14:11:26.694867 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_17d5a5ba-88b1-4769-a5fe-5f950804f332/cinder-scheduler/0.log"
Oct 13 14:11:26 crc kubenswrapper[4684]: I1013 14:11:26.782779 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_17d5a5ba-88b1-4769-a5fe-5f950804f332/probe/0.log"
Oct 13 14:11:26 crc kubenswrapper[4684]: I1013 14:11:26.805640 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-c67wg_58124363-4632-4dec-894c-b3c3c289a6f0/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:26 crc kubenswrapper[4684]: I1013 14:11:26.987164 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-8hx4f_0b60be99-cf7b-40e7-8c3b-539d082dd005/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:27 crc kubenswrapper[4684]: I1013 14:11:27.121033 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-9gnxg_29952780-72b6-4f29-9d43-06e33d6dd41a/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:27 crc kubenswrapper[4684]: I1013 14:11:27.201043 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67567868d9-g9dwp_c04a3f8c-5d8c-4c33-8964-e31de4003949/init/0.log"
Oct 13 14:11:27 crc kubenswrapper[4684]: I1013 14:11:27.424175 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67567868d9-g9dwp_c04a3f8c-5d8c-4c33-8964-e31de4003949/init/0.log"
Oct 13 14:11:27 crc kubenswrapper[4684]: I1013 14:11:27.458417 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-28bmr_6c328864-8f33-4897-8fa7-9f0feee4fbf9/download-cache-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:27 crc kubenswrapper[4684]: I1013 14:11:27.508765 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67567868d9-g9dwp_c04a3f8c-5d8c-4c33-8964-e31de4003949/dnsmasq-dns/0.log"
Oct 13 14:11:27 crc kubenswrapper[4684]: I1013 14:11:27.664761 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_80891967-a94b-4146-b440-cc217b235eee/glance-log/0.log"
Oct 13 14:11:27 crc kubenswrapper[4684]: I1013 14:11:27.726490 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_80891967-a94b-4146-b440-cc217b235eee/glance-httpd/0.log"
Oct 13 14:11:27 crc kubenswrapper[4684]: I1013 14:11:27.838063 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_615ce132-dbef-40c4-afd3-871c94b552ed/glance-httpd/0.log"
Oct 13 14:11:27 crc kubenswrapper[4684]: I1013 14:11:27.876861 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_615ce132-dbef-40c4-afd3-871c94b552ed/glance-log/0.log"
Oct 13 14:11:27 crc kubenswrapper[4684]: I1013 14:11:27.960690 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-hj69t_7a51a7e3-30bb-4bce-889c-b13d919ef64c/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:28 crc kubenswrapper[4684]: I1013 14:11:28.072558 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-kx22x_aabcf2cf-0d17-4864-a8f6-55220ed4c45c/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:28 crc kubenswrapper[4684]: I1013 14:11:28.283839 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29339401-8sg44_c58f1c18-97fd-4e58-a0e4-9bd84740c0f5/keystone-cron/0.log"
Oct 13 14:11:28 crc kubenswrapper[4684]: I1013 14:11:28.318815 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-5c87fbbfdb-hnmd8_6b42944c-2de0-47a6-bdb1-70750adb4c3c/keystone-api/0.log"
Oct 13 14:11:28 crc kubenswrapper[4684]: I1013 14:11:28.499044 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_09ad2e1c-80bb-4900-8c6b-346959ee7994/kube-state-metrics/0.log"
Oct 13 14:11:28 crc kubenswrapper[4684]: I1013 14:11:28.558155 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-cj82r_4281de73-4320-444b-9d71-877c9cf226a0/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:28 crc kubenswrapper[4684]: I1013 14:11:28.972465 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-594469df77-tzkgb_c1c77198-2e43-4230-a37d-48e65e09f466/neutron-api/0.log"
Oct 13 14:11:28 crc kubenswrapper[4684]: I1013 14:11:28.992372 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-594469df77-tzkgb_c1c77198-2e43-4230-a37d-48e65e09f466/neutron-httpd/0.log"
Oct 13 14:11:29 crc kubenswrapper[4684]: I1013 14:11:29.189669 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-zfsf7_976ab3d8-44f4-4005-a286-439105b3d942/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:29 crc kubenswrapper[4684]: I1013 14:11:29.503627 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_8332faf7-d7ae-421c-9f02-c17e93f044a5/nova-api-log/0.log"
Oct 13 14:11:29 crc kubenswrapper[4684]: I1013 14:11:29.722946 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_f4a657cc-d0c5-4c0d-8171-ee4acd5788ce/nova-cell0-conductor-conductor/0.log"
Oct 13 14:11:29 crc kubenswrapper[4684]: I1013 14:11:29.906940 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_183e5916-8b09-4bef-85d2-ee83326d865d/nova-cell1-conductor-conductor/0.log"
Oct 13 14:11:29 crc kubenswrapper[4684]: I1013 14:11:29.933471 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_8332faf7-d7ae-421c-9f02-c17e93f044a5/nova-api-api/0.log"
Oct 13 14:11:30 crc kubenswrapper[4684]: I1013 14:11:30.139037 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_15cff776-4185-4994-b294-c4ba8e704bb1/nova-cell1-novncproxy-novncproxy/0.log"
Oct 13 14:11:30 crc kubenswrapper[4684]: I1013 14:11:30.168063 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-qxhq2_777b6ddf-59c7-4afc-841b-098fe5353aea/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:30 crc kubenswrapper[4684]: I1013 14:11:30.436565 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_6ce23c3b-fb2a-4762-9a9e-f3581e9150ca/nova-metadata-log/0.log"
Oct 13 14:11:30 crc kubenswrapper[4684]: I1013 14:11:30.602368 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_245c72eb-1ac1-4101-ae07-26a94bdae8e1/nova-scheduler-scheduler/0.log"
Oct 13 14:11:30 crc kubenswrapper[4684]: I1013 14:11:30.696832 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_ab39473e-47b6-4570-b1f6-f81ee811c19f/mysql-bootstrap/0.log"
Oct 13 14:11:30 crc kubenswrapper[4684]: I1013 14:11:30.825487 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_ab39473e-47b6-4570-b1f6-f81ee811c19f/galera/0.log"
Oct 13 14:11:30 crc kubenswrapper[4684]: I1013 14:11:30.869307 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_ab39473e-47b6-4570-b1f6-f81ee811c19f/mysql-bootstrap/0.log"
Oct 13 14:11:31 crc kubenswrapper[4684]: I1013 14:11:31.002750 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_4d4341d2-642e-4c4c-b517-edb89e87d1f0/mysql-bootstrap/0.log"
Oct 13 14:11:31 crc kubenswrapper[4684]: I1013 14:11:31.187739 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_4d4341d2-642e-4c4c-b517-edb89e87d1f0/mysql-bootstrap/0.log"
Oct 13 14:11:31 crc kubenswrapper[4684]: I1013 14:11:31.269640 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_4d4341d2-642e-4c4c-b517-edb89e87d1f0/galera/0.log"
Oct 13 14:11:31 crc kubenswrapper[4684]: I1013 14:11:31.379198 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_706a05c9-47ec-4b10-a5de-227f67b3be61/openstackclient/0.log"
Oct 13 14:11:31 crc kubenswrapper[4684]: I1013 14:11:31.502108 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-jgthj_a984144e-7322-4045-a696-7ec4b746e061/ovn-controller/0.log"
Oct 13 14:11:31 crc kubenswrapper[4684]: I1013 14:11:31.654797 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-fwm22_846b5496-bd3a-41b4-aef4-546dd2e85a83/openstack-network-exporter/0.log"
Oct 13 14:11:31 crc kubenswrapper[4684]: I1013 14:11:31.736372 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_6ce23c3b-fb2a-4762-9a9e-f3581e9150ca/nova-metadata-metadata/0.log"
Oct 13 14:11:31 crc kubenswrapper[4684]: I1013 14:11:31.787337 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-fc6hl_2b902966-f950-4166-b594-afacd52e5346/ovsdb-server-init/0.log"
Oct 13 14:11:32 crc kubenswrapper[4684]: I1013 14:11:32.006890 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-fc6hl_2b902966-f950-4166-b594-afacd52e5346/ovsdb-server-init/0.log"
Oct 13 14:11:32 crc kubenswrapper[4684]: I1013 14:11:32.010493 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-fc6hl_2b902966-f950-4166-b594-afacd52e5346/ovs-vswitchd/0.log"
Oct 13 14:11:32 crc kubenswrapper[4684]: I1013 14:11:32.044824 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-fc6hl_2b902966-f950-4166-b594-afacd52e5346/ovsdb-server/0.log"
Oct 13 14:11:32 crc kubenswrapper[4684]: I1013 14:11:32.213403 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a0f64874-c6da-4e22-b3ba-29679844c1e2/openstack-network-exporter/0.log"
Oct 13 14:11:32 crc kubenswrapper[4684]: I1013 14:11:32.231545 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-p8cjk_00a6e3dc-bdab-4eab-924c-37c33fecad3e/ovn-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:32 crc kubenswrapper[4684]: I1013 14:11:32.292741 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a0f64874-c6da-4e22-b3ba-29679844c1e2/ovn-northd/0.log"
Oct 13 14:11:32 crc kubenswrapper[4684]: I1013 14:11:32.452239 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_40f06c76-a8c8-4f1c-a8b4-49fe81d3912e/ovsdbserver-nb/0.log"
Oct 13 14:11:32 crc kubenswrapper[4684]: I1013 14:11:32.479346 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_40f06c76-a8c8-4f1c-a8b4-49fe81d3912e/openstack-network-exporter/0.log"
Oct 13 14:11:32 crc kubenswrapper[4684]: I1013 14:11:32.623409 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b199f677-58e1-4a16-b904-5517b06a2b5e/openstack-network-exporter/0.log"
Oct 13 14:11:32 crc kubenswrapper[4684]: I1013 14:11:32.740067 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b199f677-58e1-4a16-b904-5517b06a2b5e/ovsdbserver-sb/0.log"
Oct 13 14:11:32 crc kubenswrapper[4684]: I1013 14:11:32.814077 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5854cb58cb-4hhrx_a8cfa6a6-f09c-4903-a8ce-df37542f7fd2/placement-api/0.log"
Oct 13 14:11:32 crc kubenswrapper[4684]: I1013 14:11:32.951115 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5854cb58cb-4hhrx_a8cfa6a6-f09c-4903-a8ce-df37542f7fd2/placement-log/0.log"
Oct 13 14:11:33 crc kubenswrapper[4684]: I1013 14:11:33.036442 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_26f3f75b-5e4f-4f4c-b8ec-53352400c7ef/setup-container/0.log"
Oct 13 14:11:33 crc kubenswrapper[4684]: I1013 14:11:33.196837 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_26f3f75b-5e4f-4f4c-b8ec-53352400c7ef/rabbitmq/0.log"
Oct 13 14:11:33 crc kubenswrapper[4684]: I1013 14:11:33.229521 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_26f3f75b-5e4f-4f4c-b8ec-53352400c7ef/setup-container/0.log"
Oct 13 14:11:33 crc kubenswrapper[4684]: I1013 14:11:33.272548 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7db9b5b9-e5be-4555-bed6-2fd9d9159b40/setup-container/0.log"
Oct 13 14:11:33 crc kubenswrapper[4684]: I1013 14:11:33.514713 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7db9b5b9-e5be-4555-bed6-2fd9d9159b40/rabbitmq/0.log"
Oct 13 14:11:33 crc kubenswrapper[4684]: I1013 14:11:33.527544 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7db9b5b9-e5be-4555-bed6-2fd9d9159b40/setup-container/0.log"
Oct 13 14:11:33 crc kubenswrapper[4684]: I1013 14:11:33.579874 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-rxhkl_0a757d2e-3642-4d64-9cf8-e0b29e43bbb9/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:33 crc kubenswrapper[4684]: I1013 14:11:33.727769 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-dcvr6_3af107fc-1ddc-4c90-80c1-c3fed25bddcb/redhat-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:33 crc kubenswrapper[4684]: I1013 14:11:33.824561 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-xt5xt_24b2196a-b383-4c65-9c61-992b7305c6ea/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:33 crc kubenswrapper[4684]: I1013 14:11:33.957732 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-8hz7n_6a0d23bf-ab14-453f-b23c-eebd64623b73/run-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:34 crc kubenswrapper[4684]: I1013 14:11:34.096110 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-l9jlr_1da24dc8-2ab0-47d3-b9a2-4f80d9b25f11/ssh-known-hosts-edpm-deployment/0.log"
Oct 13 14:11:34 crc kubenswrapper[4684]: I1013 14:11:34.333364 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-56f9bb58f9-k8bsc_61ca624d-dfba-4a64-b08f-e96cc583a2b8/proxy-server/0.log"
Oct 13 14:11:34 crc kubenswrapper[4684]: I1013 14:11:34.386824 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-56f9bb58f9-k8bsc_61ca624d-dfba-4a64-b08f-e96cc583a2b8/proxy-httpd/0.log"
Oct 13 14:11:34 crc kubenswrapper[4684]: I1013 14:11:34.420791 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-m58km_b1d3d985-3822-439b-9e3b-629629e83b34/swift-ring-rebalance/0.log"
Oct 13 14:11:34 crc kubenswrapper[4684]: I1013 14:11:34.577078 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/account-auditor/0.log"
Oct 13 14:11:34 crc kubenswrapper[4684]: I1013 14:11:34.586582 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/account-reaper/0.log"
Oct 13 14:11:34 crc kubenswrapper[4684]: I1013 14:11:34.712375 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/account-replicator/0.log"
Oct 13 14:11:34 crc kubenswrapper[4684]: I1013 14:11:34.739845 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/account-server/0.log"
Oct 13 14:11:34 crc kubenswrapper[4684]: I1013 14:11:34.763473 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/container-auditor/0.log"
Oct 13 14:11:34 crc kubenswrapper[4684]: I1013 14:11:34.810678 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/container-replicator/0.log"
Oct 13 14:11:34 crc kubenswrapper[4684]: I1013 14:11:34.913532 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/container-updater/0.log"
Oct 13 14:11:34 crc kubenswrapper[4684]: I1013 14:11:34.942093 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/container-server/0.log"
Oct 13 14:11:35 crc kubenswrapper[4684]: I1013 14:11:35.004914 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/object-auditor/0.log"
Oct 13 14:11:35 crc kubenswrapper[4684]: I1013 14:11:35.068780 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/object-expirer/0.log"
Oct 13 14:11:35 crc kubenswrapper[4684]: I1013 14:11:35.133952 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/object-server/0.log"
Oct 13 14:11:35 crc kubenswrapper[4684]: I1013 14:11:35.205627 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/object-replicator/0.log"
Oct 13 14:11:35 crc kubenswrapper[4684]: I1013 14:11:35.217245 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/object-updater/0.log"
Oct 13 14:11:35 crc kubenswrapper[4684]: I1013 14:11:35.262048 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/rsync/0.log"
Oct 13 14:11:35 crc kubenswrapper[4684]: I1013 14:11:35.344383 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bca9c4e2-0bbb-4828-bd3a-12c0a75b8946/swift-recon-cron/0.log"
Oct 13 14:11:35 crc kubenswrapper[4684]: I1013 14:11:35.502759 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-4zsck_5c4641fd-ca9f-4e0f-a21b-8f6f9ca41879/telemetry-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:35 crc kubenswrapper[4684]: I1013 14:11:35.611730 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_cba49bf6-7402-47c1-bc2d-fc49dc6b0e30/tempest-tests-tempest-tests-runner/0.log"
Oct 13 14:11:35 crc kubenswrapper[4684]: I1013 14:11:35.772109 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_77c83104-d58a-4883-ad94-dfdca3feb2d7/test-operator-logs-container/0.log"
Oct 13 14:11:35 crc kubenswrapper[4684]: I1013 14:11:35.841733 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-z7qbx_12fac5a6-e3fd-4017-9eee-36aa43193b0c/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 14:11:39 crc kubenswrapper[4684]: I1013 14:11:39.350433 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7"
Oct 13 14:11:39 crc kubenswrapper[4684]: E1013 14:11:39.351334 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 14:11:45 crc kubenswrapper[4684]: I1013 14:11:45.324286 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_73377942-2512-4398-b6ca-25aa9a591619/memcached/0.log"
Oct 13 14:11:50 crc kubenswrapper[4684]: I1013 14:11:50.351051 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7"
Oct 13 14:11:50 crc kubenswrapper[4684]: E1013 14:11:50.352019 4684 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wns5s_openshift-machine-config-operator(e54ad64a-6df7-4082-afde-d56463121b3f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f"
Oct 13 14:11:51 crc kubenswrapper[4684]: I1013 14:11:51.236486 4684 scope.go:117] "RemoveContainer" containerID="df464cb4f188283ac25a7a9f200e6a4c8fc16bb0d7af281731e3e1ec177beece"
Oct 13 14:11:59 crc kubenswrapper[4684]: I1013 14:11:59.279046 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp_d86efc0c-a787-4c60-9ed0-3ffcde968316/util/0.log"
Oct 13 14:11:59 crc kubenswrapper[4684]: I1013 14:11:59.468820 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp_d86efc0c-a787-4c60-9ed0-3ffcde968316/util/0.log"
Oct 13 14:11:59 crc kubenswrapper[4684]: I1013 14:11:59.468999 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp_d86efc0c-a787-4c60-9ed0-3ffcde968316/pull/0.log"
Oct 13 14:11:59 crc kubenswrapper[4684]: I1013 14:11:59.509802 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp_d86efc0c-a787-4c60-9ed0-3ffcde968316/pull/0.log"
Oct 13 14:11:59 crc kubenswrapper[4684]: I1013 14:11:59.676314 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp_d86efc0c-a787-4c60-9ed0-3ffcde968316/util/0.log"
Oct 13 14:11:59 crc kubenswrapper[4684]: I1013 14:11:59.713357 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp_d86efc0c-a787-4c60-9ed0-3ffcde968316/extract/0.log"
Oct 13 14:11:59 crc kubenswrapper[4684]: I1013 14:11:59.713650 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_32da80840a2017f27ed4ad61f02adc64a25aa18e8dad0409953372036ap49gp_d86efc0c-a787-4c60-9ed0-3ffcde968316/pull/0.log"
Oct 13 14:11:59 crc kubenswrapper[4684]: I1013 14:11:59.937056 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-658bdf4b74-6mg9x_b3b72bfd-179d-4a2c-bbcf-eb318658886d/manager/0.log"
Oct 13 14:11:59 crc kubenswrapper[4684]: I1013 14:11:59.965666 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-658bdf4b74-6mg9x_b3b72bfd-179d-4a2c-bbcf-eb318658886d/kube-rbac-proxy/0.log"
Oct 13 14:12:00 crc kubenswrapper[4684]: I1013 14:12:00.011301 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7b7fb68549-fb6dz_5db0ac5c-8b11-488c-8be7-14b040ddee3b/kube-rbac-proxy/0.log"
Oct 13 14:12:00 crc kubenswrapper[4684]: I1013 14:12:00.164699 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7b7fb68549-fb6dz_5db0ac5c-8b11-488c-8be7-14b040ddee3b/manager/0.log"
Oct 13 14:12:00 crc kubenswrapper[4684]: I1013 14:12:00.195784 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-85d5d9dd78-flhrp_67029ab5-dc79-4300-acc7-2e4ab2115809/kube-rbac-proxy/0.log"
Oct 13 14:12:00 crc kubenswrapper[4684]: I1013 14:12:00.229745 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-85d5d9dd78-flhrp_67029ab5-dc79-4300-acc7-2e4ab2115809/manager/0.log"
Oct 13 14:12:00 crc kubenswrapper[4684]: I1013 14:12:00.417502 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84b9b84486-rn562_be62f268-d474-44c4-847d-3f43f4e3b724/kube-rbac-proxy/0.log"
Oct 13 14:12:00 crc kubenswrapper[4684]: I1013 14:12:00.482501 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84b9b84486-rn562_be62f268-d474-44c4-847d-3f43f4e3b724/manager/0.log"
Oct 13 14:12:00 crc kubenswrapper[4684]: I1013 14:12:00.556430 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-858f76bbdd-ggcts_23dafef9-64c0-4b93-b9e6-4c5d00a94a37/kube-rbac-proxy/0.log"
Oct 13 14:12:00 crc kubenswrapper[4684]: I1013 14:12:00.637709 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-858f76bbdd-ggcts_23dafef9-64c0-4b93-b9e6-4c5d00a94a37/manager/0.log"
Oct 13 14:12:00 crc kubenswrapper[4684]: I1013 14:12:00.685334 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7ffbcb7588-k9gct_f5eb77ba-26c3-431f-88aa-43c9ca4e1137/kube-rbac-proxy/0.log"
Oct 13 14:12:00 crc kubenswrapper[4684]: I1013 14:12:00.787767 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7ffbcb7588-k9gct_f5eb77ba-26c3-431f-88aa-43c9ca4e1137/manager/0.log"
Oct 13 14:12:00 crc kubenswrapper[4684]: I1013 14:12:00.888611 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-656bcbd775-cdp57_de604670-9b37-401b-a41e-de24f939ddfa/kube-rbac-proxy/0.log"
Oct 13 14:12:01 crc kubenswrapper[4684]: I1013 14:12:01.040717 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-656bcbd775-cdp57_de604670-9b37-401b-a41e-de24f939ddfa/manager/0.log"
Oct 13 14:12:01 crc kubenswrapper[4684]: I1013 14:12:01.064850 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-9c5c78d49-fpcxc_a89d3329-f603-48e0-a781-726e723ddba2/kube-rbac-proxy/0.log"
Oct 13 14:12:01 crc kubenswrapper[4684]: I1013 14:12:01.139534 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-9c5c78d49-fpcxc_a89d3329-f603-48e0-a781-726e723ddba2/manager/0.log"
Oct 13 14:12:01 crc kubenswrapper[4684]: I1013 14:12:01.219750 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-55b6b7c7b8-nw27r_c4cff4cc-1db4-4c49-821e-c24204a45224/kube-rbac-proxy/0.log"
Oct 13 14:12:01 crc kubenswrapper[4684]: I1013 14:12:01.316112 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-55b6b7c7b8-nw27r_c4cff4cc-1db4-4c49-821e-c24204a45224/manager/0.log"
Oct 13 14:12:01 crc kubenswrapper[4684]: I1013 14:12:01.434011 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5f67fbc655-wmg4z_738e9416-e337-4ae6-89e3-40ce9e1843b6/kube-rbac-proxy/0.log"
Oct 13 14:12:01 crc kubenswrapper[4684]: I1013 14:12:01.475479 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5f67fbc655-wmg4z_738e9416-e337-4ae6-89e3-40ce9e1843b6/manager/0.log"
Oct 13 14:12:01 crc kubenswrapper[4684]: I1013 14:12:01.596828 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-f9fb45f8f-l2r6f_d99f99c5-94dd-4239-a410-b4983e91974f/kube-rbac-proxy/0.log"
Oct 13 14:12:01 crc kubenswrapper[4684]: I1013 14:12:01.625780 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-f9fb45f8f-l2r6f_d99f99c5-94dd-4239-a410-b4983e91974f/manager/0.log"
Oct 13 14:12:01 crc kubenswrapper[4684]: I1013 14:12:01.809655 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-79d585cb66-cmd4z_210a7413-8bab-4428-adc6-be6bb0037a24/kube-rbac-proxy/0.log"
Oct 13 14:12:01 crc kubenswrapper[4684]: I1013 14:12:01.901437 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-79d585cb66-cmd4z_210a7413-8bab-4428-adc6-be6bb0037a24/manager/0.log"
Oct 13 14:12:01 crc kubenswrapper[4684]: I1013 14:12:01.939636 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5df598886f-nrqjb_d6bea786-6aa7-4ad2-ae0f-5b61f4bde746/kube-rbac-proxy/0.log"
Oct 13 14:12:02 crc kubenswrapper[4684]: I1013 14:12:02.109070 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-69fdcfc5f5-hhc52_f9b4d8c9-c763-42ce-bb43-3661fd211396/kube-rbac-proxy/0.log"
Oct 13 14:12:02 crc kubenswrapper[4684]: I1013 14:12:02.111267 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5df598886f-nrqjb_d6bea786-6aa7-4ad2-ae0f-5b61f4bde746/manager/0.log"
Oct 13 14:12:02 crc kubenswrapper[4684]: I1013 14:12:02.209553 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-69fdcfc5f5-hhc52_f9b4d8c9-c763-42ce-bb43-3661fd211396/manager/0.log"
Oct 13 14:12:02 crc kubenswrapper[4684]: I1013 14:12:02.383459 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-55b7d44848pz9s5_a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8/kube-rbac-proxy/0.log"
Oct 13 14:12:02 crc kubenswrapper[4684]: I1013 14:12:02.438425 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-55b7d44848pz9s5_a2b3a7f1-37ee-4a85-ac7e-c68e74da2aa8/manager/0.log"
Oct 13 14:12:02 crc kubenswrapper[4684]: I1013 14:12:02.616632 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7fb8c88b76-kqqbz_a55aa929-2380-4b40-8d61-a9c00ea48536/kube-rbac-proxy/0.log"
Oct 13 14:12:02 crc kubenswrapper[4684]: I1013 14:12:02.894887 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-64895cd698-7gflz_a5d7b84e-4a82-4671-b69f-ec15f4446875/kube-rbac-proxy/0.log"
Oct 13 14:12:03 crc kubenswrapper[4684]: I1013 14:12:03.027878 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-64895cd698-7gflz_a5d7b84e-4a82-4671-b69f-ec15f4446875/operator/0.log"
Oct 13 14:12:03 crc kubenswrapper[4684]: I1013 14:12:03.132666 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-6rh8x_f355c9e7-5235-4ab1-891a-006f5c66de34/registry-server/0.log"
Oct 13 14:12:03 crc kubenswrapper[4684]: I1013 14:12:03.159587 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-79df5fb58c-rjcff_fc06efe2-69bb-4b8c-bade-ac2f86aedf8f/kube-rbac-proxy/0.log"
Oct 13 14:12:03 crc kubenswrapper[4684]: I1013 14:12:03.339144 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-79df5fb58c-rjcff_fc06efe2-69bb-4b8c-bade-ac2f86aedf8f/manager/0.log"
Oct 13 14:12:03 crc kubenswrapper[4684]: I1013 14:12:03.351845 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7"
Oct 13 14:12:03 crc kubenswrapper[4684]: I1013 14:12:03.362136 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-68b6c87b68-l4m6r_1ea7dea4-73ae-4444-a1e8-9704cb1f32e5/kube-rbac-proxy/0.log"
Oct 13 14:12:03 crc kubenswrapper[4684]: I1013 14:12:03.411078 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-68b6c87b68-l4m6r_1ea7dea4-73ae-4444-a1e8-9704cb1f32e5/manager/0.log"
Oct 13 14:12:03 crc kubenswrapper[4684]: I1013 14:12:03.683328 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-2bq8n_495f3276-fe48-4709-9e4e-2e57fbf3a5a6/operator/0.log"
Oct 13 14:12:03 crc kubenswrapper[4684]: I1013 14:12:03.687380 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-db6d7f97b-zbvph_77101b92-a0b5-4715-b426-918edba4833c/kube-rbac-proxy/0.log"
Oct 13 14:12:03 crc kubenswrapper[4684]: I1013 14:12:03.813829 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7fb8c88b76-kqqbz_a55aa929-2380-4b40-8d61-a9c00ea48536/manager/0.log"
Oct 13 14:12:03 crc kubenswrapper[4684]: I1013 14:12:03.825713 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-db6d7f97b-zbvph_77101b92-a0b5-4715-b426-918edba4833c/manager/0.log"
Oct 13 14:12:03 crc kubenswrapper[4684]: I1013 14:12:03.924956 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-67cfc6749b-jjkmq_8fb7300d-97a2-4116-bc41-da6d686a12a1/kube-rbac-proxy/0.log"
Oct 13 14:12:03 crc kubenswrapper[4684]: I1013 14:12:03.979107 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-67cfc6749b-jjkmq_8fb7300d-97a2-4116-bc41-da6d686a12a1/manager/0.log"
Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.064820 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5458f77c4-9cknq_34fad128-5332-4209-ba15-19cc84ccedb6/kube-rbac-proxy/0.log"
Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.120238 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rc5pb"]
Oct 13 14:12:04 crc kubenswrapper[4684]: E1013 14:12:04.120598 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1afefc8-4d59-4463-8943-a18f3c141455" containerName="extract-content"
Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.120613 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1afefc8-4d59-4463-8943-a18f3c141455" containerName="extract-content"
Oct 13 14:12:04 crc kubenswrapper[4684]: E1013 14:12:04.120634 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1afefc8-4d59-4463-8943-a18f3c141455" containerName="registry-server"
Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.120641 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1afefc8-4d59-4463-8943-a18f3c141455" containerName="registry-server"
Oct 13 14:12:04 crc kubenswrapper[4684]: E1013 14:12:04.120651 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1afefc8-4d59-4463-8943-a18f3c141455" containerName="extract-utilities"
Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.120656 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1afefc8-4d59-4463-8943-a18f3c141455" containerName="extract-utilities"
Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.120829 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1afefc8-4d59-4463-8943-a18f3c141455" containerName="registry-server"
Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.122117 4684 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.134619 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rc5pb"] Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.160235 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5458f77c4-9cknq_34fad128-5332-4209-ba15-19cc84ccedb6/manager/0.log" Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.233285 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6d2c0ba-c10b-43a2-b203-22aa413e3511-utilities\") pod \"redhat-operators-rc5pb\" (UID: \"b6d2c0ba-c10b-43a2-b203-22aa413e3511\") " pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.233825 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6d2c0ba-c10b-43a2-b203-22aa413e3511-catalog-content\") pod \"redhat-operators-rc5pb\" (UID: \"b6d2c0ba-c10b-43a2-b203-22aa413e3511\") " pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.233926 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rjp4\" (UniqueName: \"kubernetes.io/projected/b6d2c0ba-c10b-43a2-b203-22aa413e3511-kube-api-access-6rjp4\") pod \"redhat-operators-rc5pb\" (UID: \"b6d2c0ba-c10b-43a2-b203-22aa413e3511\") " pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.259060 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"cd6a018fd10618a07c0b6ad26ab32d2c11e576539c201c458234f2f41552638a"} Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.298153 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7f554bff7b-52z6n_ba30b561-090d-4067-98e4-215b0c717ce2/kube-rbac-proxy/0.log" Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.335835 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6d2c0ba-c10b-43a2-b203-22aa413e3511-utilities\") pod \"redhat-operators-rc5pb\" (UID: \"b6d2c0ba-c10b-43a2-b203-22aa413e3511\") " pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.335922 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6d2c0ba-c10b-43a2-b203-22aa413e3511-catalog-content\") pod \"redhat-operators-rc5pb\" (UID: \"b6d2c0ba-c10b-43a2-b203-22aa413e3511\") " pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.335947 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rjp4\" (UniqueName: \"kubernetes.io/projected/b6d2c0ba-c10b-43a2-b203-22aa413e3511-kube-api-access-6rjp4\") pod \"redhat-operators-rc5pb\" (UID: \"b6d2c0ba-c10b-43a2-b203-22aa413e3511\") " pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 
14:12:04.336814 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6d2c0ba-c10b-43a2-b203-22aa413e3511-utilities\") pod \"redhat-operators-rc5pb\" (UID: \"b6d2c0ba-c10b-43a2-b203-22aa413e3511\") " pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.336862 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6d2c0ba-c10b-43a2-b203-22aa413e3511-catalog-content\") pod \"redhat-operators-rc5pb\" (UID: \"b6d2c0ba-c10b-43a2-b203-22aa413e3511\") " pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.356315 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rjp4\" (UniqueName: \"kubernetes.io/projected/b6d2c0ba-c10b-43a2-b203-22aa413e3511-kube-api-access-6rjp4\") pod \"redhat-operators-rc5pb\" (UID: \"b6d2c0ba-c10b-43a2-b203-22aa413e3511\") " pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.395405 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7f554bff7b-52z6n_ba30b561-090d-4067-98e4-215b0c717ce2/manager/0.log" Oct 13 14:12:04 crc kubenswrapper[4684]: I1013 14:12:04.492313 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:05 crc kubenswrapper[4684]: I1013 14:12:05.023787 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rc5pb"] Oct 13 14:12:05 crc kubenswrapper[4684]: I1013 14:12:05.271945 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rc5pb" event={"ID":"b6d2c0ba-c10b-43a2-b203-22aa413e3511","Type":"ContainerStarted","Data":"2e1eef17b4f1ca22da23e26f2842059bc8c34a8b89aa8f10750ede19926fae25"} Oct 13 14:12:05 crc kubenswrapper[4684]: I1013 14:12:05.272216 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rc5pb" event={"ID":"b6d2c0ba-c10b-43a2-b203-22aa413e3511","Type":"ContainerStarted","Data":"ebd95b1e737fde4ef5e59206568f0ef4e8e23b0942e7ce7b3660c7f89a712556"} Oct 13 14:12:06 crc kubenswrapper[4684]: I1013 14:12:06.289401 4684 generic.go:334] "Generic (PLEG): container finished" podID="b6d2c0ba-c10b-43a2-b203-22aa413e3511" containerID="2e1eef17b4f1ca22da23e26f2842059bc8c34a8b89aa8f10750ede19926fae25" exitCode=0 Oct 13 14:12:06 crc kubenswrapper[4684]: I1013 14:12:06.289521 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rc5pb" event={"ID":"b6d2c0ba-c10b-43a2-b203-22aa413e3511","Type":"ContainerDied","Data":"2e1eef17b4f1ca22da23e26f2842059bc8c34a8b89aa8f10750ede19926fae25"} Oct 13 14:12:08 crc kubenswrapper[4684]: I1013 14:12:08.306220 4684 generic.go:334] "Generic (PLEG): container finished" podID="b6d2c0ba-c10b-43a2-b203-22aa413e3511" containerID="21ed57024d1b4a17d8ebc983352eefa729816ae7a31534f6c6c7305a2b144e2d" exitCode=0 Oct 13 14:12:08 crc kubenswrapper[4684]: I1013 14:12:08.306322 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rc5pb" event={"ID":"b6d2c0ba-c10b-43a2-b203-22aa413e3511","Type":"ContainerDied","Data":"21ed57024d1b4a17d8ebc983352eefa729816ae7a31534f6c6c7305a2b144e2d"} Oct 13 14:12:09 crc kubenswrapper[4684]: I1013 
14:12:09.316493 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rc5pb" event={"ID":"b6d2c0ba-c10b-43a2-b203-22aa413e3511","Type":"ContainerStarted","Data":"8b5d918f961db6d8e073d8169197d518061c2bcfd4209dbea411541f80aa7dc8"} Oct 13 14:12:09 crc kubenswrapper[4684]: I1013 14:12:09.331685 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rc5pb" podStartSLOduration=2.876754397 podStartE2EDuration="5.331663234s" podCreationTimestamp="2025-10-13 14:12:04 +0000 UTC" firstStartedPulling="2025-10-13 14:12:06.294131131 +0000 UTC m=+3880.861515201" lastFinishedPulling="2025-10-13 14:12:08.749039968 +0000 UTC m=+3883.316424038" observedRunningTime="2025-10-13 14:12:09.331198979 +0000 UTC m=+3883.898583039" watchObservedRunningTime="2025-10-13 14:12:09.331663234 +0000 UTC m=+3883.899047314" Oct 13 14:12:14 crc kubenswrapper[4684]: I1013 14:12:14.493360 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:14 crc kubenswrapper[4684]: I1013 14:12:14.494117 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:14 crc kubenswrapper[4684]: I1013 14:12:14.539695 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:15 crc kubenswrapper[4684]: I1013 14:12:15.412834 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:15 crc kubenswrapper[4684]: I1013 14:12:15.471001 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rc5pb"] Oct 13 14:12:17 crc kubenswrapper[4684]: I1013 14:12:17.385785 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rc5pb" podUID="b6d2c0ba-c10b-43a2-b203-22aa413e3511" containerName="registry-server" containerID="cri-o://8b5d918f961db6d8e073d8169197d518061c2bcfd4209dbea411541f80aa7dc8" gracePeriod=2 Oct 13 14:12:17 crc kubenswrapper[4684]: I1013 14:12:17.810208 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:17 crc kubenswrapper[4684]: I1013 14:12:17.996204 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6d2c0ba-c10b-43a2-b203-22aa413e3511-utilities\") pod \"b6d2c0ba-c10b-43a2-b203-22aa413e3511\" (UID: \"b6d2c0ba-c10b-43a2-b203-22aa413e3511\") " Oct 13 14:12:17 crc kubenswrapper[4684]: I1013 14:12:17.996409 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6d2c0ba-c10b-43a2-b203-22aa413e3511-catalog-content\") pod \"b6d2c0ba-c10b-43a2-b203-22aa413e3511\" (UID: \"b6d2c0ba-c10b-43a2-b203-22aa413e3511\") " Oct 13 14:12:17 crc kubenswrapper[4684]: I1013 14:12:17.996448 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rjp4\" (UniqueName: \"kubernetes.io/projected/b6d2c0ba-c10b-43a2-b203-22aa413e3511-kube-api-access-6rjp4\") pod \"b6d2c0ba-c10b-43a2-b203-22aa413e3511\" (UID: \"b6d2c0ba-c10b-43a2-b203-22aa413e3511\") " Oct 13 14:12:17 crc kubenswrapper[4684]: I1013 14:12:17.996944 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6d2c0ba-c10b-43a2-b203-22aa413e3511-utilities" (OuterVolumeSpecName: "utilities") pod "b6d2c0ba-c10b-43a2-b203-22aa413e3511" (UID: "b6d2c0ba-c10b-43a2-b203-22aa413e3511"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.004108 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6d2c0ba-c10b-43a2-b203-22aa413e3511-kube-api-access-6rjp4" (OuterVolumeSpecName: "kube-api-access-6rjp4") pod "b6d2c0ba-c10b-43a2-b203-22aa413e3511" (UID: "b6d2c0ba-c10b-43a2-b203-22aa413e3511"). InnerVolumeSpecName "kube-api-access-6rjp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.077956 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6d2c0ba-c10b-43a2-b203-22aa413e3511-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b6d2c0ba-c10b-43a2-b203-22aa413e3511" (UID: "b6d2c0ba-c10b-43a2-b203-22aa413e3511"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.098503 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6d2c0ba-c10b-43a2-b203-22aa413e3511-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.098543 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rjp4\" (UniqueName: \"kubernetes.io/projected/b6d2c0ba-c10b-43a2-b203-22aa413e3511-kube-api-access-6rjp4\") on node \"crc\" DevicePath \"\"" Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.098555 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6d2c0ba-c10b-43a2-b203-22aa413e3511-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.397023 4684 generic.go:334] "Generic (PLEG): container finished" podID="b6d2c0ba-c10b-43a2-b203-22aa413e3511" containerID="8b5d918f961db6d8e073d8169197d518061c2bcfd4209dbea411541f80aa7dc8" exitCode=0 Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.397085 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rc5pb" Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.397120 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rc5pb" event={"ID":"b6d2c0ba-c10b-43a2-b203-22aa413e3511","Type":"ContainerDied","Data":"8b5d918f961db6d8e073d8169197d518061c2bcfd4209dbea411541f80aa7dc8"} Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.397172 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rc5pb" event={"ID":"b6d2c0ba-c10b-43a2-b203-22aa413e3511","Type":"ContainerDied","Data":"ebd95b1e737fde4ef5e59206568f0ef4e8e23b0942e7ce7b3660c7f89a712556"} Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.397196 4684 scope.go:117] "RemoveContainer" containerID="8b5d918f961db6d8e073d8169197d518061c2bcfd4209dbea411541f80aa7dc8" Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.435493 4684 scope.go:117] "RemoveContainer" containerID="21ed57024d1b4a17d8ebc983352eefa729816ae7a31534f6c6c7305a2b144e2d" Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.438917 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rc5pb"] Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.448718 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rc5pb"] Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.471370 4684 scope.go:117] "RemoveContainer" containerID="2e1eef17b4f1ca22da23e26f2842059bc8c34a8b89aa8f10750ede19926fae25" Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.508180 4684 scope.go:117] "RemoveContainer" containerID="8b5d918f961db6d8e073d8169197d518061c2bcfd4209dbea411541f80aa7dc8" Oct 13 14:12:18 crc kubenswrapper[4684]: E1013 14:12:18.509292 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b5d918f961db6d8e073d8169197d518061c2bcfd4209dbea411541f80aa7dc8\": container with ID starting with 8b5d918f961db6d8e073d8169197d518061c2bcfd4209dbea411541f80aa7dc8 not found: ID does not exist" containerID="8b5d918f961db6d8e073d8169197d518061c2bcfd4209dbea411541f80aa7dc8" Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.509318 4684 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b5d918f961db6d8e073d8169197d518061c2bcfd4209dbea411541f80aa7dc8"} err="failed to get container status \"8b5d918f961db6d8e073d8169197d518061c2bcfd4209dbea411541f80aa7dc8\": rpc error: code = NotFound desc = could not find container \"8b5d918f961db6d8e073d8169197d518061c2bcfd4209dbea411541f80aa7dc8\": container with ID starting with 8b5d918f961db6d8e073d8169197d518061c2bcfd4209dbea411541f80aa7dc8 not found: ID does not exist" Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.509338 4684 scope.go:117] "RemoveContainer" containerID="21ed57024d1b4a17d8ebc983352eefa729816ae7a31534f6c6c7305a2b144e2d" Oct 13 14:12:18 crc kubenswrapper[4684]: E1013 14:12:18.509660 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21ed57024d1b4a17d8ebc983352eefa729816ae7a31534f6c6c7305a2b144e2d\": container with ID starting with 21ed57024d1b4a17d8ebc983352eefa729816ae7a31534f6c6c7305a2b144e2d not found: ID does not exist" containerID="21ed57024d1b4a17d8ebc983352eefa729816ae7a31534f6c6c7305a2b144e2d" Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.509695 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21ed57024d1b4a17d8ebc983352eefa729816ae7a31534f6c6c7305a2b144e2d"} err="failed to get container status \"21ed57024d1b4a17d8ebc983352eefa729816ae7a31534f6c6c7305a2b144e2d\": rpc error: code = NotFound desc = could not find container \"21ed57024d1b4a17d8ebc983352eefa729816ae7a31534f6c6c7305a2b144e2d\": container with ID starting with 21ed57024d1b4a17d8ebc983352eefa729816ae7a31534f6c6c7305a2b144e2d not found: ID does not exist" Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.509719 4684 scope.go:117] "RemoveContainer" containerID="2e1eef17b4f1ca22da23e26f2842059bc8c34a8b89aa8f10750ede19926fae25" Oct 13 14:12:18 crc kubenswrapper[4684]: E1013 14:12:18.510010 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e1eef17b4f1ca22da23e26f2842059bc8c34a8b89aa8f10750ede19926fae25\": container with ID starting with 2e1eef17b4f1ca22da23e26f2842059bc8c34a8b89aa8f10750ede19926fae25 not found: ID does not exist" containerID="2e1eef17b4f1ca22da23e26f2842059bc8c34a8b89aa8f10750ede19926fae25" Oct 13 14:12:18 crc kubenswrapper[4684]: I1013 14:12:18.510041 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e1eef17b4f1ca22da23e26f2842059bc8c34a8b89aa8f10750ede19926fae25"} err="failed to get container status \"2e1eef17b4f1ca22da23e26f2842059bc8c34a8b89aa8f10750ede19926fae25\": rpc error: code = NotFound desc = could not find container \"2e1eef17b4f1ca22da23e26f2842059bc8c34a8b89aa8f10750ede19926fae25\": container with ID starting with 2e1eef17b4f1ca22da23e26f2842059bc8c34a8b89aa8f10750ede19926fae25 not found: ID does not exist" Oct 13 14:12:20 crc kubenswrapper[4684]: I1013 14:12:20.360205 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6d2c0ba-c10b-43a2-b203-22aa413e3511" path="/var/lib/kubelet/pods/b6d2c0ba-c10b-43a2-b203-22aa413e3511/volumes" Oct 13 14:12:21 crc kubenswrapper[4684]: I1013 14:12:21.058672 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-ptz8b_de6c8979-1887-4e08-9439-f6654ced778f/control-plane-machine-set-operator/0.log" Oct 13 14:12:21 crc 
kubenswrapper[4684]: I1013 14:12:21.189210 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-87shb_bf558883-5672-46d6-9d8a-a08070751a86/kube-rbac-proxy/0.log" Oct 13 14:12:21 crc kubenswrapper[4684]: I1013 14:12:21.231022 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-87shb_bf558883-5672-46d6-9d8a-a08070751a86/machine-api-operator/0.log" Oct 13 14:12:32 crc kubenswrapper[4684]: I1013 14:12:32.295476 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-4xcpj_f65d7f2f-54ed-4d01-b143-82edecc32788/cert-manager-controller/0.log" Oct 13 14:12:32 crc kubenswrapper[4684]: I1013 14:12:32.498623 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-58lpp_d50e48e4-4e31-4a3b-bf2f-69dcbfdf8ef4/cert-manager-cainjector/0.log" Oct 13 14:12:32 crc kubenswrapper[4684]: I1013 14:12:32.542861 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-ldckd_5689e64c-b30a-4009-ad81-2ace50352b94/cert-manager-webhook/0.log" Oct 13 14:12:43 crc kubenswrapper[4684]: I1013 14:12:43.724317 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-vkcdq_5608900f-c09f-4810-bde2-87588dadfe55/nmstate-console-plugin/0.log" Oct 13 14:12:43 crc kubenswrapper[4684]: I1013 14:12:43.894482 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-xsv76_1fd2ac9e-de15-4598-a050-44f58f8e8a50/nmstate-handler/0.log" Oct 13 14:12:43 crc kubenswrapper[4684]: I1013 14:12:43.986580 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-r2xpx_aad74a49-543f-4f01-a171-0ca010ba7319/kube-rbac-proxy/0.log" Oct 13 14:12:43 crc kubenswrapper[4684]: I1013 14:12:43.995003 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-r2xpx_aad74a49-543f-4f01-a171-0ca010ba7319/nmstate-metrics/0.log" Oct 13 14:12:44 crc kubenswrapper[4684]: I1013 14:12:44.135193 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-c68bl_bf38af4f-4552-46e1-8011-0e8924331c2f/nmstate-operator/0.log" Oct 13 14:12:44 crc kubenswrapper[4684]: I1013 14:12:44.233520 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-hw69f_a6d3dd3f-7c9d-45b9-8f81-dd85f2554caa/nmstate-webhook/0.log" Oct 13 14:12:51 crc kubenswrapper[4684]: I1013 14:12:51.360777 4684 scope.go:117] "RemoveContainer" containerID="7179e3162b8efa0bec7e33d8143ae52f7a225d873e28cb928a3505f0a9bb8fbe" Oct 13 14:12:56 crc kubenswrapper[4684]: I1013 14:12:56.823806 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-85wm6_ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac/kube-rbac-proxy/0.log" Oct 13 14:12:56 crc kubenswrapper[4684]: I1013 14:12:56.948830 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-85wm6_ce2e4042-84e3-41dd-ac0f-a39a3d4cb7ac/controller/0.log" Oct 13 14:12:56 crc kubenswrapper[4684]: I1013 14:12:56.998940 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-nwgv6_642eea78-8230-4b4a-b1f3-1a96d1d8942f/frr-k8s-webhook-server/0.log" Oct 13 14:12:57 crc 
kubenswrapper[4684]: I1013 14:12:57.128127 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-frr-files/0.log" Oct 13 14:12:57 crc kubenswrapper[4684]: I1013 14:12:57.315581 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-frr-files/0.log" Oct 13 14:12:57 crc kubenswrapper[4684]: I1013 14:12:57.327896 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-metrics/0.log" Oct 13 14:12:57 crc kubenswrapper[4684]: I1013 14:12:57.338569 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-reloader/0.log" Oct 13 14:12:57 crc kubenswrapper[4684]: I1013 14:12:57.368664 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-reloader/0.log" Oct 13 14:12:57 crc kubenswrapper[4684]: I1013 14:12:57.512129 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-frr-files/0.log" Oct 13 14:12:57 crc kubenswrapper[4684]: I1013 14:12:57.531884 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-metrics/0.log" Oct 13 14:12:57 crc kubenswrapper[4684]: I1013 14:12:57.565593 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-reloader/0.log" Oct 13 14:12:57 crc kubenswrapper[4684]: I1013 14:12:57.577768 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-metrics/0.log" Oct 13 14:12:57 crc kubenswrapper[4684]: I1013 14:12:57.744141 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-reloader/0.log" Oct 13 14:12:57 crc kubenswrapper[4684]: I1013 14:12:57.747890 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-frr-files/0.log" Oct 13 14:12:57 crc kubenswrapper[4684]: I1013 14:12:57.759796 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/cp-metrics/0.log" Oct 13 14:12:57 crc kubenswrapper[4684]: I1013 14:12:57.762262 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/controller/0.log" Oct 13 14:12:57 crc kubenswrapper[4684]: I1013 14:12:57.916539 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/frr-metrics/0.log" Oct 13 14:12:57 crc kubenswrapper[4684]: I1013 14:12:57.932675 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/kube-rbac-proxy-frr/0.log" Oct 13 14:12:57 crc kubenswrapper[4684]: I1013 14:12:57.939886 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/kube-rbac-proxy/0.log" Oct 13 14:12:58 crc kubenswrapper[4684]: I1013 14:12:58.149311 4684 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/reloader/0.log" Oct 13 14:12:58 crc kubenswrapper[4684]: I1013 14:12:58.169323 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-745c4c95f5-j54v2_c231fd74-600a-46ff-ba30-605a9445b002/manager/0.log" Oct 13 14:12:58 crc kubenswrapper[4684]: I1013 14:12:58.325969 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6cbcf8c94c-hkxm8_b2944a80-992b-4799-a461-82f7c2398295/webhook-server/0.log" Oct 13 14:12:58 crc kubenswrapper[4684]: I1013 14:12:58.493608 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-hprww_2ff5f48b-1277-4a47-af89-e71172d731d6/kube-rbac-proxy/0.log" Oct 13 14:12:58 crc kubenswrapper[4684]: I1013 14:12:58.924338 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-hprww_2ff5f48b-1277-4a47-af89-e71172d731d6/speaker/0.log" Oct 13 14:12:59 crc kubenswrapper[4684]: I1013 14:12:59.370684 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zgfqf_2422c566-8db9-4af9-824e-0c9c91ab12b8/frr/0.log" Oct 13 14:13:10 crc kubenswrapper[4684]: I1013 14:13:10.075641 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks_bcc2f7fe-8648-4e50-946b-b0792d150f63/util/0.log" Oct 13 14:13:10 crc kubenswrapper[4684]: I1013 14:13:10.285886 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks_bcc2f7fe-8648-4e50-946b-b0792d150f63/pull/0.log" Oct 13 14:13:10 crc kubenswrapper[4684]: I1013 14:13:10.286117 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks_bcc2f7fe-8648-4e50-946b-b0792d150f63/pull/0.log" Oct 13 14:13:10 crc kubenswrapper[4684]: I1013 14:13:10.286515 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks_bcc2f7fe-8648-4e50-946b-b0792d150f63/util/0.log" Oct 13 14:13:10 crc kubenswrapper[4684]: I1013 14:13:10.437992 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks_bcc2f7fe-8648-4e50-946b-b0792d150f63/extract/0.log" Oct 13 14:13:10 crc kubenswrapper[4684]: I1013 14:13:10.455517 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks_bcc2f7fe-8648-4e50-946b-b0792d150f63/util/0.log" Oct 13 14:13:10 crc kubenswrapper[4684]: I1013 14:13:10.483191 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d26czks_bcc2f7fe-8648-4e50-946b-b0792d150f63/pull/0.log" Oct 13 14:13:10 crc kubenswrapper[4684]: I1013 14:13:10.625206 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dqll_e5497e7c-0895-4f48-bc23-89ed3b1b5fb5/extract-utilities/0.log" Oct 13 14:13:10 crc kubenswrapper[4684]: I1013 14:13:10.784186 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dqll_e5497e7c-0895-4f48-bc23-89ed3b1b5fb5/extract-utilities/0.log" Oct 13 14:13:10 
crc kubenswrapper[4684]: I1013 14:13:10.800343 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dqll_e5497e7c-0895-4f48-bc23-89ed3b1b5fb5/extract-content/0.log" Oct 13 14:13:10 crc kubenswrapper[4684]: I1013 14:13:10.807213 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dqll_e5497e7c-0895-4f48-bc23-89ed3b1b5fb5/extract-content/0.log" Oct 13 14:13:10 crc kubenswrapper[4684]: I1013 14:13:10.982712 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dqll_e5497e7c-0895-4f48-bc23-89ed3b1b5fb5/extract-utilities/0.log" Oct 13 14:13:10 crc kubenswrapper[4684]: I1013 14:13:10.983103 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dqll_e5497e7c-0895-4f48-bc23-89ed3b1b5fb5/extract-content/0.log" Oct 13 14:13:11 crc kubenswrapper[4684]: I1013 14:13:11.230230 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8spk8_4262fc7d-7332-42ac-9a54-06e4dc3c4d46/extract-utilities/0.log" Oct 13 14:13:11 crc kubenswrapper[4684]: I1013 14:13:11.391205 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8spk8_4262fc7d-7332-42ac-9a54-06e4dc3c4d46/extract-utilities/0.log" Oct 13 14:13:11 crc kubenswrapper[4684]: I1013 14:13:11.434451 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8spk8_4262fc7d-7332-42ac-9a54-06e4dc3c4d46/extract-content/0.log" Oct 13 14:13:11 crc kubenswrapper[4684]: I1013 14:13:11.473842 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8spk8_4262fc7d-7332-42ac-9a54-06e4dc3c4d46/extract-content/0.log" Oct 13 14:13:11 crc kubenswrapper[4684]: I1013 14:13:11.507698 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2dqll_e5497e7c-0895-4f48-bc23-89ed3b1b5fb5/registry-server/0.log" Oct 13 14:13:11 crc kubenswrapper[4684]: I1013 14:13:11.664863 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8spk8_4262fc7d-7332-42ac-9a54-06e4dc3c4d46/extract-utilities/0.log" Oct 13 14:13:11 crc kubenswrapper[4684]: I1013 14:13:11.739025 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8spk8_4262fc7d-7332-42ac-9a54-06e4dc3c4d46/extract-content/0.log" Oct 13 14:13:11 crc kubenswrapper[4684]: I1013 14:13:11.908739 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt_03260d40-6a40-406f-b8a9-7898ae8a3b16/util/0.log" Oct 13 14:13:12 crc kubenswrapper[4684]: I1013 14:13:12.080444 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8spk8_4262fc7d-7332-42ac-9a54-06e4dc3c4d46/registry-server/0.log" Oct 13 14:13:12 crc kubenswrapper[4684]: I1013 14:13:12.125652 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt_03260d40-6a40-406f-b8a9-7898ae8a3b16/pull/0.log" Oct 13 14:13:12 crc kubenswrapper[4684]: I1013 14:13:12.151587 4684 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt_03260d40-6a40-406f-b8a9-7898ae8a3b16/util/0.log" Oct 13 14:13:12 crc kubenswrapper[4684]: I1013 14:13:12.174498 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt_03260d40-6a40-406f-b8a9-7898ae8a3b16/pull/0.log" Oct 13 14:13:12 crc kubenswrapper[4684]: I1013 14:13:12.327884 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt_03260d40-6a40-406f-b8a9-7898ae8a3b16/pull/0.log" Oct 13 14:13:12 crc kubenswrapper[4684]: I1013 14:13:12.335627 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt_03260d40-6a40-406f-b8a9-7898ae8a3b16/util/0.log" Oct 13 14:13:12 crc kubenswrapper[4684]: I1013 14:13:12.356992 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835chzptt_03260d40-6a40-406f-b8a9-7898ae8a3b16/extract/0.log" Oct 13 14:13:12 crc kubenswrapper[4684]: I1013 14:13:12.504035 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-6t588_cc8221bd-07d6-42ed-b1dd-d81881844b60/marketplace-operator/0.log" Oct 13 14:13:12 crc kubenswrapper[4684]: I1013 14:13:12.527383 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h5qdd_4fc769db-2b2f-431c-bef7-1b3c46fe628c/extract-utilities/0.log" Oct 13 14:13:12 crc kubenswrapper[4684]: I1013 14:13:12.692558 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h5qdd_4fc769db-2b2f-431c-bef7-1b3c46fe628c/extract-content/0.log" Oct 13 14:13:12 crc kubenswrapper[4684]: I1013 14:13:12.698800 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h5qdd_4fc769db-2b2f-431c-bef7-1b3c46fe628c/extract-utilities/0.log" Oct 13 14:13:12 crc kubenswrapper[4684]: I1013 14:13:12.743935 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h5qdd_4fc769db-2b2f-431c-bef7-1b3c46fe628c/extract-content/0.log" Oct 13 14:13:12 crc kubenswrapper[4684]: I1013 14:13:12.967572 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h5qdd_4fc769db-2b2f-431c-bef7-1b3c46fe628c/extract-content/0.log" Oct 13 14:13:12 crc kubenswrapper[4684]: I1013 14:13:12.973289 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h5qdd_4fc769db-2b2f-431c-bef7-1b3c46fe628c/extract-utilities/0.log" Oct 13 14:13:13 crc kubenswrapper[4684]: I1013 14:13:13.115773 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h5qdd_4fc769db-2b2f-431c-bef7-1b3c46fe628c/registry-server/0.log" Oct 13 14:13:13 crc kubenswrapper[4684]: I1013 14:13:13.192204 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qzj9z_ef7652d6-3ac3-4739-a190-5b071c4d0839/extract-utilities/0.log" Oct 13 14:13:13 crc kubenswrapper[4684]: I1013 14:13:13.361602 4684 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-qzj9z_ef7652d6-3ac3-4739-a190-5b071c4d0839/extract-content/0.log" Oct 13 14:13:13 crc kubenswrapper[4684]: I1013 14:13:13.361769 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qzj9z_ef7652d6-3ac3-4739-a190-5b071c4d0839/extract-content/0.log" Oct 13 14:13:13 crc kubenswrapper[4684]: I1013 14:13:13.362805 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qzj9z_ef7652d6-3ac3-4739-a190-5b071c4d0839/extract-utilities/0.log" Oct 13 14:13:13 crc kubenswrapper[4684]: I1013 14:13:13.556673 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qzj9z_ef7652d6-3ac3-4739-a190-5b071c4d0839/extract-utilities/0.log" Oct 13 14:13:13 crc kubenswrapper[4684]: I1013 14:13:13.563923 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qzj9z_ef7652d6-3ac3-4739-a190-5b071c4d0839/extract-content/0.log" Oct 13 14:13:14 crc kubenswrapper[4684]: I1013 14:13:14.132882 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-qzj9z_ef7652d6-3ac3-4739-a190-5b071c4d0839/registry-server/0.log" Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.562775 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gdlh2"] Oct 13 14:13:41 crc kubenswrapper[4684]: E1013 14:13:41.564704 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6d2c0ba-c10b-43a2-b203-22aa413e3511" containerName="extract-utilities" Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.564809 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6d2c0ba-c10b-43a2-b203-22aa413e3511" containerName="extract-utilities" Oct 13 14:13:41 crc kubenswrapper[4684]: E1013 14:13:41.565198 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6d2c0ba-c10b-43a2-b203-22aa413e3511" containerName="registry-server" Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.565295 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6d2c0ba-c10b-43a2-b203-22aa413e3511" containerName="registry-server" Oct 13 14:13:41 crc kubenswrapper[4684]: E1013 14:13:41.565373 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6d2c0ba-c10b-43a2-b203-22aa413e3511" containerName="extract-content" Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.565445 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6d2c0ba-c10b-43a2-b203-22aa413e3511" containerName="extract-content" Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.565962 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6d2c0ba-c10b-43a2-b203-22aa413e3511" containerName="registry-server" Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.567758 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.577309 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gdlh2"] Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.705817 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cec8731-1249-4f3d-b11b-b7fa3bcff956-utilities\") pod \"community-operators-gdlh2\" (UID: \"5cec8731-1249-4f3d-b11b-b7fa3bcff956\") " pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.705987 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bzbm\" (UniqueName: \"kubernetes.io/projected/5cec8731-1249-4f3d-b11b-b7fa3bcff956-kube-api-access-7bzbm\") pod \"community-operators-gdlh2\" (UID: \"5cec8731-1249-4f3d-b11b-b7fa3bcff956\") " pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.706022 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cec8731-1249-4f3d-b11b-b7fa3bcff956-catalog-content\") pod \"community-operators-gdlh2\" (UID: \"5cec8731-1249-4f3d-b11b-b7fa3bcff956\") " pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.807604 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cec8731-1249-4f3d-b11b-b7fa3bcff956-utilities\") pod \"community-operators-gdlh2\" (UID: \"5cec8731-1249-4f3d-b11b-b7fa3bcff956\") " pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.807701 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bzbm\" (UniqueName: \"kubernetes.io/projected/5cec8731-1249-4f3d-b11b-b7fa3bcff956-kube-api-access-7bzbm\") pod \"community-operators-gdlh2\" (UID: \"5cec8731-1249-4f3d-b11b-b7fa3bcff956\") " pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.807723 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cec8731-1249-4f3d-b11b-b7fa3bcff956-catalog-content\") pod \"community-operators-gdlh2\" (UID: \"5cec8731-1249-4f3d-b11b-b7fa3bcff956\") " pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.808122 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cec8731-1249-4f3d-b11b-b7fa3bcff956-utilities\") pod \"community-operators-gdlh2\" (UID: \"5cec8731-1249-4f3d-b11b-b7fa3bcff956\") " pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.808217 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cec8731-1249-4f3d-b11b-b7fa3bcff956-catalog-content\") pod \"community-operators-gdlh2\" (UID: \"5cec8731-1249-4f3d-b11b-b7fa3bcff956\") " pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.829887 4684 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7bzbm\" (UniqueName: \"kubernetes.io/projected/5cec8731-1249-4f3d-b11b-b7fa3bcff956-kube-api-access-7bzbm\") pod \"community-operators-gdlh2\" (UID: \"5cec8731-1249-4f3d-b11b-b7fa3bcff956\") " pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:41 crc kubenswrapper[4684]: I1013 14:13:41.896733 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:42 crc kubenswrapper[4684]: I1013 14:13:42.575198 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gdlh2"] Oct 13 14:13:43 crc kubenswrapper[4684]: I1013 14:13:43.127882 4684 generic.go:334] "Generic (PLEG): container finished" podID="5cec8731-1249-4f3d-b11b-b7fa3bcff956" containerID="328412fe19feafea4b9c2b714f98197484153d0e2f9f86185cf1b318a105c15e" exitCode=0 Oct 13 14:13:43 crc kubenswrapper[4684]: I1013 14:13:43.128181 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gdlh2" event={"ID":"5cec8731-1249-4f3d-b11b-b7fa3bcff956","Type":"ContainerDied","Data":"328412fe19feafea4b9c2b714f98197484153d0e2f9f86185cf1b318a105c15e"} Oct 13 14:13:43 crc kubenswrapper[4684]: I1013 14:13:43.128206 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gdlh2" event={"ID":"5cec8731-1249-4f3d-b11b-b7fa3bcff956","Type":"ContainerStarted","Data":"5d7ce5d9a45413a1763076792b31e1e6cc35378ace16c00294f3ec777d37f521"} Oct 13 14:13:45 crc kubenswrapper[4684]: I1013 14:13:45.148719 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gdlh2" event={"ID":"5cec8731-1249-4f3d-b11b-b7fa3bcff956","Type":"ContainerStarted","Data":"1bec580a76a7f8e62c1704306e4a52f70a46d1680d45dbe2e62f5e6d025caccc"} Oct 13 14:13:47 crc kubenswrapper[4684]: I1013 14:13:47.196008 4684 generic.go:334] "Generic (PLEG): container finished" podID="5cec8731-1249-4f3d-b11b-b7fa3bcff956" containerID="1bec580a76a7f8e62c1704306e4a52f70a46d1680d45dbe2e62f5e6d025caccc" exitCode=0 Oct 13 14:13:47 crc kubenswrapper[4684]: I1013 14:13:47.196078 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gdlh2" event={"ID":"5cec8731-1249-4f3d-b11b-b7fa3bcff956","Type":"ContainerDied","Data":"1bec580a76a7f8e62c1704306e4a52f70a46d1680d45dbe2e62f5e6d025caccc"} Oct 13 14:13:48 crc kubenswrapper[4684]: I1013 14:13:48.211631 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gdlh2" event={"ID":"5cec8731-1249-4f3d-b11b-b7fa3bcff956","Type":"ContainerStarted","Data":"fd84cf8d5e4bc661dc55c38aa1e0d88dcb70e2f4110cedb0ca6536340416ece4"} Oct 13 14:13:48 crc kubenswrapper[4684]: I1013 14:13:48.240564 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gdlh2" podStartSLOduration=2.72250984 podStartE2EDuration="7.240536035s" podCreationTimestamp="2025-10-13 14:13:41 +0000 UTC" firstStartedPulling="2025-10-13 14:13:43.131134538 +0000 UTC m=+3977.698518598" lastFinishedPulling="2025-10-13 14:13:47.649160723 +0000 UTC m=+3982.216544793" observedRunningTime="2025-10-13 14:13:48.234202345 +0000 UTC m=+3982.801586425" watchObservedRunningTime="2025-10-13 14:13:48.240536035 +0000 UTC m=+3982.807920115" Oct 13 14:13:51 crc kubenswrapper[4684]: I1013 14:13:51.897244 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:51 crc kubenswrapper[4684]: I1013 14:13:51.897792 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:51 crc kubenswrapper[4684]: I1013 14:13:51.943280 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:52 crc kubenswrapper[4684]: I1013 14:13:52.291951 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:52 crc kubenswrapper[4684]: I1013 14:13:52.962656 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gdlh2"] Oct 13 14:13:54 crc kubenswrapper[4684]: I1013 14:13:54.261580 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gdlh2" podUID="5cec8731-1249-4f3d-b11b-b7fa3bcff956" containerName="registry-server" containerID="cri-o://fd84cf8d5e4bc661dc55c38aa1e0d88dcb70e2f4110cedb0ca6536340416ece4" gracePeriod=2 Oct 13 14:13:54 crc kubenswrapper[4684]: I1013 14:13:54.722386 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:54 crc kubenswrapper[4684]: I1013 14:13:54.880186 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cec8731-1249-4f3d-b11b-b7fa3bcff956-catalog-content\") pod \"5cec8731-1249-4f3d-b11b-b7fa3bcff956\" (UID: \"5cec8731-1249-4f3d-b11b-b7fa3bcff956\") " Oct 13 14:13:54 crc kubenswrapper[4684]: I1013 14:13:54.880695 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bzbm\" (UniqueName: \"kubernetes.io/projected/5cec8731-1249-4f3d-b11b-b7fa3bcff956-kube-api-access-7bzbm\") pod \"5cec8731-1249-4f3d-b11b-b7fa3bcff956\" (UID: \"5cec8731-1249-4f3d-b11b-b7fa3bcff956\") " Oct 13 14:13:54 crc kubenswrapper[4684]: I1013 14:13:54.880947 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cec8731-1249-4f3d-b11b-b7fa3bcff956-utilities\") pod \"5cec8731-1249-4f3d-b11b-b7fa3bcff956\" (UID: \"5cec8731-1249-4f3d-b11b-b7fa3bcff956\") " Oct 13 14:13:54 crc kubenswrapper[4684]: I1013 14:13:54.883124 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cec8731-1249-4f3d-b11b-b7fa3bcff956-utilities" (OuterVolumeSpecName: "utilities") pod "5cec8731-1249-4f3d-b11b-b7fa3bcff956" (UID: "5cec8731-1249-4f3d-b11b-b7fa3bcff956"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 14:13:54 crc kubenswrapper[4684]: I1013 14:13:54.983413 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cec8731-1249-4f3d-b11b-b7fa3bcff956-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.281120 4684 generic.go:334] "Generic (PLEG): container finished" podID="5cec8731-1249-4f3d-b11b-b7fa3bcff956" containerID="fd84cf8d5e4bc661dc55c38aa1e0d88dcb70e2f4110cedb0ca6536340416ece4" exitCode=0 Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.281167 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gdlh2" event={"ID":"5cec8731-1249-4f3d-b11b-b7fa3bcff956","Type":"ContainerDied","Data":"fd84cf8d5e4bc661dc55c38aa1e0d88dcb70e2f4110cedb0ca6536340416ece4"} Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.281194 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gdlh2" Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.281340 4684 scope.go:117] "RemoveContainer" containerID="fd84cf8d5e4bc661dc55c38aa1e0d88dcb70e2f4110cedb0ca6536340416ece4" Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.281199 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gdlh2" event={"ID":"5cec8731-1249-4f3d-b11b-b7fa3bcff956","Type":"ContainerDied","Data":"5d7ce5d9a45413a1763076792b31e1e6cc35378ace16c00294f3ec777d37f521"} Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.291727 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cec8731-1249-4f3d-b11b-b7fa3bcff956-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5cec8731-1249-4f3d-b11b-b7fa3bcff956" (UID: "5cec8731-1249-4f3d-b11b-b7fa3bcff956"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.292299 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cec8731-1249-4f3d-b11b-b7fa3bcff956-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.315221 4684 scope.go:117] "RemoveContainer" containerID="1bec580a76a7f8e62c1704306e4a52f70a46d1680d45dbe2e62f5e6d025caccc" Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.380842 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cec8731-1249-4f3d-b11b-b7fa3bcff956-kube-api-access-7bzbm" (OuterVolumeSpecName: "kube-api-access-7bzbm") pod "5cec8731-1249-4f3d-b11b-b7fa3bcff956" (UID: "5cec8731-1249-4f3d-b11b-b7fa3bcff956"). InnerVolumeSpecName "kube-api-access-7bzbm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.388389 4684 scope.go:117] "RemoveContainer" containerID="328412fe19feafea4b9c2b714f98197484153d0e2f9f86185cf1b318a105c15e" Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.394217 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bzbm\" (UniqueName: \"kubernetes.io/projected/5cec8731-1249-4f3d-b11b-b7fa3bcff956-kube-api-access-7bzbm\") on node \"crc\" DevicePath \"\"" Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.483791 4684 scope.go:117] "RemoveContainer" containerID="fd84cf8d5e4bc661dc55c38aa1e0d88dcb70e2f4110cedb0ca6536340416ece4" Oct 13 14:13:55 crc kubenswrapper[4684]: E1013 14:13:55.484329 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd84cf8d5e4bc661dc55c38aa1e0d88dcb70e2f4110cedb0ca6536340416ece4\": container with ID starting with fd84cf8d5e4bc661dc55c38aa1e0d88dcb70e2f4110cedb0ca6536340416ece4 not found: ID does not exist" containerID="fd84cf8d5e4bc661dc55c38aa1e0d88dcb70e2f4110cedb0ca6536340416ece4" Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.484355 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd84cf8d5e4bc661dc55c38aa1e0d88dcb70e2f4110cedb0ca6536340416ece4"} err="failed to get container status \"fd84cf8d5e4bc661dc55c38aa1e0d88dcb70e2f4110cedb0ca6536340416ece4\": rpc error: code = NotFound desc = could not find container \"fd84cf8d5e4bc661dc55c38aa1e0d88dcb70e2f4110cedb0ca6536340416ece4\": container with ID starting with fd84cf8d5e4bc661dc55c38aa1e0d88dcb70e2f4110cedb0ca6536340416ece4 not found: ID does not exist" Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.484374 4684 scope.go:117] "RemoveContainer" containerID="1bec580a76a7f8e62c1704306e4a52f70a46d1680d45dbe2e62f5e6d025caccc" Oct 13 14:13:55 crc kubenswrapper[4684]: E1013 14:13:55.484701 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bec580a76a7f8e62c1704306e4a52f70a46d1680d45dbe2e62f5e6d025caccc\": container with ID starting with 1bec580a76a7f8e62c1704306e4a52f70a46d1680d45dbe2e62f5e6d025caccc not found: ID does not exist" containerID="1bec580a76a7f8e62c1704306e4a52f70a46d1680d45dbe2e62f5e6d025caccc" Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.484720 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bec580a76a7f8e62c1704306e4a52f70a46d1680d45dbe2e62f5e6d025caccc"} err="failed to get container status \"1bec580a76a7f8e62c1704306e4a52f70a46d1680d45dbe2e62f5e6d025caccc\": rpc error: code = NotFound desc = could not find container \"1bec580a76a7f8e62c1704306e4a52f70a46d1680d45dbe2e62f5e6d025caccc\": container with ID starting with 1bec580a76a7f8e62c1704306e4a52f70a46d1680d45dbe2e62f5e6d025caccc not found: ID does not exist" Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.484733 4684 scope.go:117] "RemoveContainer" containerID="328412fe19feafea4b9c2b714f98197484153d0e2f9f86185cf1b318a105c15e" Oct 13 14:13:55 crc kubenswrapper[4684]: E1013 14:13:55.485092 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"328412fe19feafea4b9c2b714f98197484153d0e2f9f86185cf1b318a105c15e\": container with ID starting with 328412fe19feafea4b9c2b714f98197484153d0e2f9f86185cf1b318a105c15e not found: ID does not 
exist" containerID="328412fe19feafea4b9c2b714f98197484153d0e2f9f86185cf1b318a105c15e" Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.485110 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"328412fe19feafea4b9c2b714f98197484153d0e2f9f86185cf1b318a105c15e"} err="failed to get container status \"328412fe19feafea4b9c2b714f98197484153d0e2f9f86185cf1b318a105c15e\": rpc error: code = NotFound desc = could not find container \"328412fe19feafea4b9c2b714f98197484153d0e2f9f86185cf1b318a105c15e\": container with ID starting with 328412fe19feafea4b9c2b714f98197484153d0e2f9f86185cf1b318a105c15e not found: ID does not exist" Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.611278 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gdlh2"] Oct 13 14:13:55 crc kubenswrapper[4684]: I1013 14:13:55.621219 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gdlh2"] Oct 13 14:13:56 crc kubenswrapper[4684]: I1013 14:13:56.371255 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cec8731-1249-4f3d-b11b-b7fa3bcff956" path="/var/lib/kubelet/pods/5cec8731-1249-4f3d-b11b-b7fa3bcff956/volumes" Oct 13 14:14:30 crc kubenswrapper[4684]: I1013 14:14:30.560187 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 14:14:30 crc kubenswrapper[4684]: I1013 14:14:30.560991 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 14:14:54 crc kubenswrapper[4684]: I1013 14:14:54.893085 4684 generic.go:334] "Generic (PLEG): container finished" podID="06a28563-d823-4490-b3e2-af173ee1b16c" containerID="a3b18e5e9e999d75b463d5450af166c3e8a89d8c3594e0dc2825518c77fd2233" exitCode=0 Oct 13 14:14:54 crc kubenswrapper[4684]: I1013 14:14:54.893200 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-pklvl/must-gather-llxnv" event={"ID":"06a28563-d823-4490-b3e2-af173ee1b16c","Type":"ContainerDied","Data":"a3b18e5e9e999d75b463d5450af166c3e8a89d8c3594e0dc2825518c77fd2233"} Oct 13 14:14:54 crc kubenswrapper[4684]: I1013 14:14:54.894196 4684 scope.go:117] "RemoveContainer" containerID="a3b18e5e9e999d75b463d5450af166c3e8a89d8c3594e0dc2825518c77fd2233" Oct 13 14:14:54 crc kubenswrapper[4684]: I1013 14:14:54.958564 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-pklvl_must-gather-llxnv_06a28563-d823-4490-b3e2-af173ee1b16c/gather/0.log" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.157273 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49"] Oct 13 14:15:00 crc kubenswrapper[4684]: E1013 14:15:00.158233 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cec8731-1249-4f3d-b11b-b7fa3bcff956" containerName="extract-utilities" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.158248 4684 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="5cec8731-1249-4f3d-b11b-b7fa3bcff956" containerName="extract-utilities" Oct 13 14:15:00 crc kubenswrapper[4684]: E1013 14:15:00.158296 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cec8731-1249-4f3d-b11b-b7fa3bcff956" containerName="extract-content" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.158306 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cec8731-1249-4f3d-b11b-b7fa3bcff956" containerName="extract-content" Oct 13 14:15:00 crc kubenswrapper[4684]: E1013 14:15:00.158325 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cec8731-1249-4f3d-b11b-b7fa3bcff956" containerName="registry-server" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.158332 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cec8731-1249-4f3d-b11b-b7fa3bcff956" containerName="registry-server" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.158508 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cec8731-1249-4f3d-b11b-b7fa3bcff956" containerName="registry-server" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.159174 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.161540 4684 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.161704 4684 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.183341 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49"] Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.274920 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/412e2e6e-9690-4f78-b6ae-631988f2b22f-config-volume\") pod \"collect-profiles-29339415-8fz49\" (UID: \"412e2e6e-9690-4f78-b6ae-631988f2b22f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.275163 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gf8t5\" (UniqueName: \"kubernetes.io/projected/412e2e6e-9690-4f78-b6ae-631988f2b22f-kube-api-access-gf8t5\") pod \"collect-profiles-29339415-8fz49\" (UID: \"412e2e6e-9690-4f78-b6ae-631988f2b22f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.275492 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/412e2e6e-9690-4f78-b6ae-631988f2b22f-secret-volume\") pod \"collect-profiles-29339415-8fz49\" (UID: \"412e2e6e-9690-4f78-b6ae-631988f2b22f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.377582 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/412e2e6e-9690-4f78-b6ae-631988f2b22f-secret-volume\") pod \"collect-profiles-29339415-8fz49\" (UID: 
\"412e2e6e-9690-4f78-b6ae-631988f2b22f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.377724 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/412e2e6e-9690-4f78-b6ae-631988f2b22f-config-volume\") pod \"collect-profiles-29339415-8fz49\" (UID: \"412e2e6e-9690-4f78-b6ae-631988f2b22f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.377774 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gf8t5\" (UniqueName: \"kubernetes.io/projected/412e2e6e-9690-4f78-b6ae-631988f2b22f-kube-api-access-gf8t5\") pod \"collect-profiles-29339415-8fz49\" (UID: \"412e2e6e-9690-4f78-b6ae-631988f2b22f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.379220 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/412e2e6e-9690-4f78-b6ae-631988f2b22f-config-volume\") pod \"collect-profiles-29339415-8fz49\" (UID: \"412e2e6e-9690-4f78-b6ae-631988f2b22f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.560503 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.560571 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.775202 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/412e2e6e-9690-4f78-b6ae-631988f2b22f-secret-volume\") pod \"collect-profiles-29339415-8fz49\" (UID: \"412e2e6e-9690-4f78-b6ae-631988f2b22f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.782015 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gf8t5\" (UniqueName: \"kubernetes.io/projected/412e2e6e-9690-4f78-b6ae-631988f2b22f-kube-api-access-gf8t5\") pod \"collect-profiles-29339415-8fz49\" (UID: \"412e2e6e-9690-4f78-b6ae-631988f2b22f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49" Oct 13 14:15:00 crc kubenswrapper[4684]: I1013 14:15:00.807470 4684 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49" Oct 13 14:15:01 crc kubenswrapper[4684]: I1013 14:15:01.244192 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49"] Oct 13 14:15:01 crc kubenswrapper[4684]: I1013 14:15:01.962672 4684 generic.go:334] "Generic (PLEG): container finished" podID="412e2e6e-9690-4f78-b6ae-631988f2b22f" containerID="b15338f662f04c24b2243adc9bbc6548aa09bf1bad2f5b7ea4dbb9db17fdddce" exitCode=0 Oct 13 14:15:01 crc kubenswrapper[4684]: I1013 14:15:01.962790 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49" event={"ID":"412e2e6e-9690-4f78-b6ae-631988f2b22f","Type":"ContainerDied","Data":"b15338f662f04c24b2243adc9bbc6548aa09bf1bad2f5b7ea4dbb9db17fdddce"} Oct 13 14:15:01 crc kubenswrapper[4684]: I1013 14:15:01.963488 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49" event={"ID":"412e2e6e-9690-4f78-b6ae-631988f2b22f","Type":"ContainerStarted","Data":"052388b0011cb08a4a0cbad6bb2fe97eb1020317a8efa23c4134be17159d133d"} Oct 13 14:15:03 crc kubenswrapper[4684]: I1013 14:15:03.351472 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49" Oct 13 14:15:03 crc kubenswrapper[4684]: I1013 14:15:03.470130 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf8t5\" (UniqueName: \"kubernetes.io/projected/412e2e6e-9690-4f78-b6ae-631988f2b22f-kube-api-access-gf8t5\") pod \"412e2e6e-9690-4f78-b6ae-631988f2b22f\" (UID: \"412e2e6e-9690-4f78-b6ae-631988f2b22f\") " Oct 13 14:15:03 crc kubenswrapper[4684]: I1013 14:15:03.470264 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/412e2e6e-9690-4f78-b6ae-631988f2b22f-secret-volume\") pod \"412e2e6e-9690-4f78-b6ae-631988f2b22f\" (UID: \"412e2e6e-9690-4f78-b6ae-631988f2b22f\") " Oct 13 14:15:03 crc kubenswrapper[4684]: I1013 14:15:03.470370 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/412e2e6e-9690-4f78-b6ae-631988f2b22f-config-volume\") pod \"412e2e6e-9690-4f78-b6ae-631988f2b22f\" (UID: \"412e2e6e-9690-4f78-b6ae-631988f2b22f\") " Oct 13 14:15:03 crc kubenswrapper[4684]: I1013 14:15:03.471561 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/412e2e6e-9690-4f78-b6ae-631988f2b22f-config-volume" (OuterVolumeSpecName: "config-volume") pod "412e2e6e-9690-4f78-b6ae-631988f2b22f" (UID: "412e2e6e-9690-4f78-b6ae-631988f2b22f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 14:15:03 crc kubenswrapper[4684]: I1013 14:15:03.489111 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/412e2e6e-9690-4f78-b6ae-631988f2b22f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "412e2e6e-9690-4f78-b6ae-631988f2b22f" (UID: "412e2e6e-9690-4f78-b6ae-631988f2b22f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 14:15:03 crc kubenswrapper[4684]: I1013 14:15:03.489181 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/412e2e6e-9690-4f78-b6ae-631988f2b22f-kube-api-access-gf8t5" (OuterVolumeSpecName: "kube-api-access-gf8t5") pod "412e2e6e-9690-4f78-b6ae-631988f2b22f" (UID: "412e2e6e-9690-4f78-b6ae-631988f2b22f"). InnerVolumeSpecName "kube-api-access-gf8t5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 14:15:03 crc kubenswrapper[4684]: I1013 14:15:03.573119 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf8t5\" (UniqueName: \"kubernetes.io/projected/412e2e6e-9690-4f78-b6ae-631988f2b22f-kube-api-access-gf8t5\") on node \"crc\" DevicePath \"\"" Oct 13 14:15:03 crc kubenswrapper[4684]: I1013 14:15:03.573162 4684 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/412e2e6e-9690-4f78-b6ae-631988f2b22f-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 13 14:15:03 crc kubenswrapper[4684]: I1013 14:15:03.573173 4684 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/412e2e6e-9690-4f78-b6ae-631988f2b22f-config-volume\") on node \"crc\" DevicePath \"\"" Oct 13 14:15:03 crc kubenswrapper[4684]: I1013 14:15:03.985550 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49" event={"ID":"412e2e6e-9690-4f78-b6ae-631988f2b22f","Type":"ContainerDied","Data":"052388b0011cb08a4a0cbad6bb2fe97eb1020317a8efa23c4134be17159d133d"} Oct 13 14:15:03 crc kubenswrapper[4684]: I1013 14:15:03.985882 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="052388b0011cb08a4a0cbad6bb2fe97eb1020317a8efa23c4134be17159d133d" Oct 13 14:15:03 crc kubenswrapper[4684]: I1013 14:15:03.985593 4684 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339415-8fz49" Oct 13 14:15:04 crc kubenswrapper[4684]: I1013 14:15:04.431304 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"] Oct 13 14:15:04 crc kubenswrapper[4684]: I1013 14:15:04.439551 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339370-5pbhn"] Oct 13 14:15:04 crc kubenswrapper[4684]: I1013 14:15:04.514623 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-pklvl/must-gather-llxnv"] Oct 13 14:15:04 crc kubenswrapper[4684]: I1013 14:15:04.514889 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-pklvl/must-gather-llxnv" podUID="06a28563-d823-4490-b3e2-af173ee1b16c" containerName="copy" containerID="cri-o://f50adaa6fe093a8f0b7ade245e8e5276b58842012588455ee80305ba7b87a829" gracePeriod=2 Oct 13 14:15:04 crc kubenswrapper[4684]: I1013 14:15:04.523833 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-pklvl/must-gather-llxnv"] Oct 13 14:15:04 crc kubenswrapper[4684]: I1013 14:15:04.996358 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-pklvl_must-gather-llxnv_06a28563-d823-4490-b3e2-af173ee1b16c/copy/0.log" Oct 13 14:15:04 crc kubenswrapper[4684]: I1013 14:15:04.997338 4684 generic.go:334] "Generic (PLEG): container finished" podID="06a28563-d823-4490-b3e2-af173ee1b16c" containerID="f50adaa6fe093a8f0b7ade245e8e5276b58842012588455ee80305ba7b87a829" exitCode=143 Oct 13 14:15:04 crc kubenswrapper[4684]: I1013 14:15:04.997387 4684 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="73279d646e251fc317f991bedcbdccb98caad83d42a635223e41baaf44e51fbe" Oct 13 14:15:05 crc kubenswrapper[4684]: I1013 14:15:05.051482 4684 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-pklvl_must-gather-llxnv_06a28563-d823-4490-b3e2-af173ee1b16c/copy/0.log" Oct 13 14:15:05 crc kubenswrapper[4684]: I1013 14:15:05.051811 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pklvl/must-gather-llxnv" Oct 13 14:15:05 crc kubenswrapper[4684]: I1013 14:15:05.101810 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6dqd\" (UniqueName: \"kubernetes.io/projected/06a28563-d823-4490-b3e2-af173ee1b16c-kube-api-access-b6dqd\") pod \"06a28563-d823-4490-b3e2-af173ee1b16c\" (UID: \"06a28563-d823-4490-b3e2-af173ee1b16c\") " Oct 13 14:15:05 crc kubenswrapper[4684]: I1013 14:15:05.101929 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/06a28563-d823-4490-b3e2-af173ee1b16c-must-gather-output\") pod \"06a28563-d823-4490-b3e2-af173ee1b16c\" (UID: \"06a28563-d823-4490-b3e2-af173ee1b16c\") " Oct 13 14:15:05 crc kubenswrapper[4684]: I1013 14:15:05.111097 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06a28563-d823-4490-b3e2-af173ee1b16c-kube-api-access-b6dqd" (OuterVolumeSpecName: "kube-api-access-b6dqd") pod "06a28563-d823-4490-b3e2-af173ee1b16c" (UID: "06a28563-d823-4490-b3e2-af173ee1b16c"). InnerVolumeSpecName "kube-api-access-b6dqd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 14:15:05 crc kubenswrapper[4684]: I1013 14:15:05.204181 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6dqd\" (UniqueName: \"kubernetes.io/projected/06a28563-d823-4490-b3e2-af173ee1b16c-kube-api-access-b6dqd\") on node \"crc\" DevicePath \"\"" Oct 13 14:15:05 crc kubenswrapper[4684]: I1013 14:15:05.253215 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06a28563-d823-4490-b3e2-af173ee1b16c-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "06a28563-d823-4490-b3e2-af173ee1b16c" (UID: "06a28563-d823-4490-b3e2-af173ee1b16c"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 14:15:05 crc kubenswrapper[4684]: I1013 14:15:05.305963 4684 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/06a28563-d823-4490-b3e2-af173ee1b16c-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 13 14:15:06 crc kubenswrapper[4684]: I1013 14:15:06.004200 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-pklvl/must-gather-llxnv" Oct 13 14:15:06 crc kubenswrapper[4684]: I1013 14:15:06.362875 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06a28563-d823-4490-b3e2-af173ee1b16c" path="/var/lib/kubelet/pods/06a28563-d823-4490-b3e2-af173ee1b16c/volumes" Oct 13 14:15:06 crc kubenswrapper[4684]: I1013 14:15:06.363794 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab1ae631-3915-4339-b5ac-3b190dac48b7" path="/var/lib/kubelet/pods/ab1ae631-3915-4339-b5ac-3b190dac48b7/volumes" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.666824 4684 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qgw2q"] Oct 13 14:15:25 crc kubenswrapper[4684]: E1013 14:15:25.668125 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a28563-d823-4490-b3e2-af173ee1b16c" containerName="gather" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.668147 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a28563-d823-4490-b3e2-af173ee1b16c" containerName="gather" Oct 13 14:15:25 crc kubenswrapper[4684]: E1013 14:15:25.668168 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a28563-d823-4490-b3e2-af173ee1b16c" containerName="copy" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.668176 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a28563-d823-4490-b3e2-af173ee1b16c" containerName="copy" Oct 13 14:15:25 crc kubenswrapper[4684]: E1013 14:15:25.668193 4684 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="412e2e6e-9690-4f78-b6ae-631988f2b22f" containerName="collect-profiles" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.668202 4684 state_mem.go:107] "Deleted CPUSet assignment" podUID="412e2e6e-9690-4f78-b6ae-631988f2b22f" containerName="collect-profiles" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.668410 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="06a28563-d823-4490-b3e2-af173ee1b16c" containerName="gather" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.668474 4684 memory_manager.go:354] "RemoveStaleState removing state" podUID="412e2e6e-9690-4f78-b6ae-631988f2b22f" containerName="collect-profiles" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.668487 4684 
memory_manager.go:354] "RemoveStaleState removing state" podUID="06a28563-d823-4490-b3e2-af173ee1b16c" containerName="copy" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.670348 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.692705 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qgw2q"] Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.729955 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a31d674c-55c3-49bc-a619-9754b0c0d7bb-catalog-content\") pod \"redhat-marketplace-qgw2q\" (UID: \"a31d674c-55c3-49bc-a619-9754b0c0d7bb\") " pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.730086 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jftmc\" (UniqueName: \"kubernetes.io/projected/a31d674c-55c3-49bc-a619-9754b0c0d7bb-kube-api-access-jftmc\") pod \"redhat-marketplace-qgw2q\" (UID: \"a31d674c-55c3-49bc-a619-9754b0c0d7bb\") " pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.730162 4684 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a31d674c-55c3-49bc-a619-9754b0c0d7bb-utilities\") pod \"redhat-marketplace-qgw2q\" (UID: \"a31d674c-55c3-49bc-a619-9754b0c0d7bb\") " pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.831689 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a31d674c-55c3-49bc-a619-9754b0c0d7bb-catalog-content\") pod \"redhat-marketplace-qgw2q\" (UID: \"a31d674c-55c3-49bc-a619-9754b0c0d7bb\") " pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.831773 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jftmc\" (UniqueName: \"kubernetes.io/projected/a31d674c-55c3-49bc-a619-9754b0c0d7bb-kube-api-access-jftmc\") pod \"redhat-marketplace-qgw2q\" (UID: \"a31d674c-55c3-49bc-a619-9754b0c0d7bb\") " pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.831814 4684 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a31d674c-55c3-49bc-a619-9754b0c0d7bb-utilities\") pod \"redhat-marketplace-qgw2q\" (UID: \"a31d674c-55c3-49bc-a619-9754b0c0d7bb\") " pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.832469 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a31d674c-55c3-49bc-a619-9754b0c0d7bb-utilities\") pod \"redhat-marketplace-qgw2q\" (UID: \"a31d674c-55c3-49bc-a619-9754b0c0d7bb\") " pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.832485 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a31d674c-55c3-49bc-a619-9754b0c0d7bb-catalog-content\") pod 
\"redhat-marketplace-qgw2q\" (UID: \"a31d674c-55c3-49bc-a619-9754b0c0d7bb\") " pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:25 crc kubenswrapper[4684]: I1013 14:15:25.850988 4684 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jftmc\" (UniqueName: \"kubernetes.io/projected/a31d674c-55c3-49bc-a619-9754b0c0d7bb-kube-api-access-jftmc\") pod \"redhat-marketplace-qgw2q\" (UID: \"a31d674c-55c3-49bc-a619-9754b0c0d7bb\") " pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:26 crc kubenswrapper[4684]: I1013 14:15:26.018633 4684 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:26 crc kubenswrapper[4684]: I1013 14:15:26.449100 4684 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qgw2q"] Oct 13 14:15:27 crc kubenswrapper[4684]: I1013 14:15:27.196354 4684 generic.go:334] "Generic (PLEG): container finished" podID="a31d674c-55c3-49bc-a619-9754b0c0d7bb" containerID="6fdd1f6bfe10607cd3eeca4407c6e9152b10fc176d5808f95336150730c0ed39" exitCode=0 Oct 13 14:15:27 crc kubenswrapper[4684]: I1013 14:15:27.196499 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgw2q" event={"ID":"a31d674c-55c3-49bc-a619-9754b0c0d7bb","Type":"ContainerDied","Data":"6fdd1f6bfe10607cd3eeca4407c6e9152b10fc176d5808f95336150730c0ed39"} Oct 13 14:15:27 crc kubenswrapper[4684]: I1013 14:15:27.196797 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgw2q" event={"ID":"a31d674c-55c3-49bc-a619-9754b0c0d7bb","Type":"ContainerStarted","Data":"d846138bd4ce41921010014303a4dfb626c0557a53d5d5906b9eade55a7c0945"} Oct 13 14:15:29 crc kubenswrapper[4684]: I1013 14:15:29.217938 4684 generic.go:334] "Generic (PLEG): container finished" podID="a31d674c-55c3-49bc-a619-9754b0c0d7bb" containerID="c2aeaa5c0605f891be27a661d155d5a435fb4a1eeee3b8c98c13b74e260236f4" exitCode=0 Oct 13 14:15:29 crc kubenswrapper[4684]: I1013 14:15:29.218029 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgw2q" event={"ID":"a31d674c-55c3-49bc-a619-9754b0c0d7bb","Type":"ContainerDied","Data":"c2aeaa5c0605f891be27a661d155d5a435fb4a1eeee3b8c98c13b74e260236f4"} Oct 13 14:15:30 crc kubenswrapper[4684]: I1013 14:15:30.230979 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgw2q" event={"ID":"a31d674c-55c3-49bc-a619-9754b0c0d7bb","Type":"ContainerStarted","Data":"23e7c87a2ba8f102b3a691eb932037ef8439eedbaf3003962182340e7b74855c"} Oct 13 14:15:30 crc kubenswrapper[4684]: I1013 14:15:30.260013 4684 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qgw2q" podStartSLOduration=2.7895169920000003 podStartE2EDuration="5.259993068s" podCreationTimestamp="2025-10-13 14:15:25 +0000 UTC" firstStartedPulling="2025-10-13 14:15:27.198699548 +0000 UTC m=+4081.766083618" lastFinishedPulling="2025-10-13 14:15:29.669175604 +0000 UTC m=+4084.236559694" observedRunningTime="2025-10-13 14:15:30.252180052 +0000 UTC m=+4084.819564122" watchObservedRunningTime="2025-10-13 14:15:30.259993068 +0000 UTC m=+4084.827377138" Oct 13 14:15:30 crc kubenswrapper[4684]: I1013 14:15:30.559873 4684 patch_prober.go:28] interesting pod/machine-config-daemon-wns5s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness 
probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 14:15:30 crc kubenswrapper[4684]: I1013 14:15:30.559962 4684 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 14:15:30 crc kubenswrapper[4684]: I1013 14:15:30.560008 4684 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" Oct 13 14:15:30 crc kubenswrapper[4684]: I1013 14:15:30.560743 4684 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cd6a018fd10618a07c0b6ad26ab32d2c11e576539c201c458234f2f41552638a"} pod="openshift-machine-config-operator/machine-config-daemon-wns5s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 14:15:30 crc kubenswrapper[4684]: I1013 14:15:30.560801 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" podUID="e54ad64a-6df7-4082-afde-d56463121b3f" containerName="machine-config-daemon" containerID="cri-o://cd6a018fd10618a07c0b6ad26ab32d2c11e576539c201c458234f2f41552638a" gracePeriod=600 Oct 13 14:15:31 crc kubenswrapper[4684]: I1013 14:15:31.245093 4684 generic.go:334] "Generic (PLEG): container finished" podID="e54ad64a-6df7-4082-afde-d56463121b3f" containerID="cd6a018fd10618a07c0b6ad26ab32d2c11e576539c201c458234f2f41552638a" exitCode=0 Oct 13 14:15:31 crc kubenswrapper[4684]: I1013 14:15:31.245363 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerDied","Data":"cd6a018fd10618a07c0b6ad26ab32d2c11e576539c201c458234f2f41552638a"} Oct 13 14:15:31 crc kubenswrapper[4684]: I1013 14:15:31.245416 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wns5s" event={"ID":"e54ad64a-6df7-4082-afde-d56463121b3f","Type":"ContainerStarted","Data":"9ec0f1d521282826d8dd8af9c8abe5d6dae634b0b3fb411fabae8486081a8671"} Oct 13 14:15:31 crc kubenswrapper[4684]: I1013 14:15:31.245438 4684 scope.go:117] "RemoveContainer" containerID="d32770f693cd8fc5d184774f3acc506f11a27c0ced3f27d759790056b2dad3c7" Oct 13 14:15:36 crc kubenswrapper[4684]: I1013 14:15:36.020085 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:36 crc kubenswrapper[4684]: I1013 14:15:36.020620 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:36 crc kubenswrapper[4684]: I1013 14:15:36.075045 4684 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:36 crc kubenswrapper[4684]: I1013 14:15:36.339048 4684 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:36 crc kubenswrapper[4684]: I1013 14:15:36.397634 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-marketplace-qgw2q"] Oct 13 14:15:38 crc kubenswrapper[4684]: I1013 14:15:38.315135 4684 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qgw2q" podUID="a31d674c-55c3-49bc-a619-9754b0c0d7bb" containerName="registry-server" containerID="cri-o://23e7c87a2ba8f102b3a691eb932037ef8439eedbaf3003962182340e7b74855c" gracePeriod=2 Oct 13 14:15:38 crc kubenswrapper[4684]: I1013 14:15:38.780009 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:38 crc kubenswrapper[4684]: I1013 14:15:38.892724 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a31d674c-55c3-49bc-a619-9754b0c0d7bb-utilities\") pod \"a31d674c-55c3-49bc-a619-9754b0c0d7bb\" (UID: \"a31d674c-55c3-49bc-a619-9754b0c0d7bb\") " Oct 13 14:15:38 crc kubenswrapper[4684]: I1013 14:15:38.892869 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a31d674c-55c3-49bc-a619-9754b0c0d7bb-catalog-content\") pod \"a31d674c-55c3-49bc-a619-9754b0c0d7bb\" (UID: \"a31d674c-55c3-49bc-a619-9754b0c0d7bb\") " Oct 13 14:15:38 crc kubenswrapper[4684]: I1013 14:15:38.893079 4684 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jftmc\" (UniqueName: \"kubernetes.io/projected/a31d674c-55c3-49bc-a619-9754b0c0d7bb-kube-api-access-jftmc\") pod \"a31d674c-55c3-49bc-a619-9754b0c0d7bb\" (UID: \"a31d674c-55c3-49bc-a619-9754b0c0d7bb\") " Oct 13 14:15:38 crc kubenswrapper[4684]: I1013 14:15:38.895006 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a31d674c-55c3-49bc-a619-9754b0c0d7bb-utilities" (OuterVolumeSpecName: "utilities") pod "a31d674c-55c3-49bc-a619-9754b0c0d7bb" (UID: "a31d674c-55c3-49bc-a619-9754b0c0d7bb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 14:15:38 crc kubenswrapper[4684]: I1013 14:15:38.899281 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31d674c-55c3-49bc-a619-9754b0c0d7bb-kube-api-access-jftmc" (OuterVolumeSpecName: "kube-api-access-jftmc") pod "a31d674c-55c3-49bc-a619-9754b0c0d7bb" (UID: "a31d674c-55c3-49bc-a619-9754b0c0d7bb"). InnerVolumeSpecName "kube-api-access-jftmc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 14:15:38 crc kubenswrapper[4684]: I1013 14:15:38.908719 4684 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a31d674c-55c3-49bc-a619-9754b0c0d7bb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a31d674c-55c3-49bc-a619-9754b0c0d7bb" (UID: "a31d674c-55c3-49bc-a619-9754b0c0d7bb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 14:15:38 crc kubenswrapper[4684]: I1013 14:15:38.995182 4684 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a31d674c-55c3-49bc-a619-9754b0c0d7bb-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 14:15:38 crc kubenswrapper[4684]: I1013 14:15:38.995509 4684 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a31d674c-55c3-49bc-a619-9754b0c0d7bb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 14:15:38 crc kubenswrapper[4684]: I1013 14:15:38.995532 4684 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jftmc\" (UniqueName: \"kubernetes.io/projected/a31d674c-55c3-49bc-a619-9754b0c0d7bb-kube-api-access-jftmc\") on node \"crc\" DevicePath \"\"" Oct 13 14:15:39 crc kubenswrapper[4684]: I1013 14:15:39.329935 4684 generic.go:334] "Generic (PLEG): container finished" podID="a31d674c-55c3-49bc-a619-9754b0c0d7bb" containerID="23e7c87a2ba8f102b3a691eb932037ef8439eedbaf3003962182340e7b74855c" exitCode=0 Oct 13 14:15:39 crc kubenswrapper[4684]: I1013 14:15:39.329985 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgw2q" event={"ID":"a31d674c-55c3-49bc-a619-9754b0c0d7bb","Type":"ContainerDied","Data":"23e7c87a2ba8f102b3a691eb932037ef8439eedbaf3003962182340e7b74855c"} Oct 13 14:15:39 crc kubenswrapper[4684]: I1013 14:15:39.330010 4684 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qgw2q" Oct 13 14:15:39 crc kubenswrapper[4684]: I1013 14:15:39.330045 4684 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgw2q" event={"ID":"a31d674c-55c3-49bc-a619-9754b0c0d7bb","Type":"ContainerDied","Data":"d846138bd4ce41921010014303a4dfb626c0557a53d5d5906b9eade55a7c0945"} Oct 13 14:15:39 crc kubenswrapper[4684]: I1013 14:15:39.330075 4684 scope.go:117] "RemoveContainer" containerID="23e7c87a2ba8f102b3a691eb932037ef8439eedbaf3003962182340e7b74855c" Oct 13 14:15:39 crc kubenswrapper[4684]: I1013 14:15:39.370325 4684 scope.go:117] "RemoveContainer" containerID="c2aeaa5c0605f891be27a661d155d5a435fb4a1eeee3b8c98c13b74e260236f4" Oct 13 14:15:39 crc kubenswrapper[4684]: I1013 14:15:39.379861 4684 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qgw2q"] Oct 13 14:15:39 crc kubenswrapper[4684]: I1013 14:15:39.390477 4684 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qgw2q"] Oct 13 14:15:39 crc kubenswrapper[4684]: I1013 14:15:39.395800 4684 scope.go:117] "RemoveContainer" containerID="6fdd1f6bfe10607cd3eeca4407c6e9152b10fc176d5808f95336150730c0ed39" Oct 13 14:15:39 crc kubenswrapper[4684]: I1013 14:15:39.446135 4684 scope.go:117] "RemoveContainer" containerID="23e7c87a2ba8f102b3a691eb932037ef8439eedbaf3003962182340e7b74855c" Oct 13 14:15:39 crc kubenswrapper[4684]: E1013 14:15:39.446809 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23e7c87a2ba8f102b3a691eb932037ef8439eedbaf3003962182340e7b74855c\": container with ID starting with 23e7c87a2ba8f102b3a691eb932037ef8439eedbaf3003962182340e7b74855c not found: ID does not exist" containerID="23e7c87a2ba8f102b3a691eb932037ef8439eedbaf3003962182340e7b74855c" Oct 13 14:15:39 crc kubenswrapper[4684]: I1013 14:15:39.446880 4684 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23e7c87a2ba8f102b3a691eb932037ef8439eedbaf3003962182340e7b74855c"} err="failed to get container status \"23e7c87a2ba8f102b3a691eb932037ef8439eedbaf3003962182340e7b74855c\": rpc error: code = NotFound desc = could not find container \"23e7c87a2ba8f102b3a691eb932037ef8439eedbaf3003962182340e7b74855c\": container with ID starting with 23e7c87a2ba8f102b3a691eb932037ef8439eedbaf3003962182340e7b74855c not found: ID does not exist" Oct 13 14:15:39 crc kubenswrapper[4684]: I1013 14:15:39.446946 4684 scope.go:117] "RemoveContainer" containerID="c2aeaa5c0605f891be27a661d155d5a435fb4a1eeee3b8c98c13b74e260236f4" Oct 13 14:15:39 crc kubenswrapper[4684]: E1013 14:15:39.447446 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2aeaa5c0605f891be27a661d155d5a435fb4a1eeee3b8c98c13b74e260236f4\": container with ID starting with c2aeaa5c0605f891be27a661d155d5a435fb4a1eeee3b8c98c13b74e260236f4 not found: ID does not exist" containerID="c2aeaa5c0605f891be27a661d155d5a435fb4a1eeee3b8c98c13b74e260236f4" Oct 13 14:15:39 crc kubenswrapper[4684]: I1013 14:15:39.447505 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2aeaa5c0605f891be27a661d155d5a435fb4a1eeee3b8c98c13b74e260236f4"} err="failed to get container status \"c2aeaa5c0605f891be27a661d155d5a435fb4a1eeee3b8c98c13b74e260236f4\": rpc error: code = NotFound desc = could not find container \"c2aeaa5c0605f891be27a661d155d5a435fb4a1eeee3b8c98c13b74e260236f4\": container with ID starting with c2aeaa5c0605f891be27a661d155d5a435fb4a1eeee3b8c98c13b74e260236f4 not found: ID does not exist" Oct 13 14:15:39 crc kubenswrapper[4684]: I1013 14:15:39.447528 4684 scope.go:117] "RemoveContainer" containerID="6fdd1f6bfe10607cd3eeca4407c6e9152b10fc176d5808f95336150730c0ed39" Oct 13 14:15:39 crc kubenswrapper[4684]: E1013 14:15:39.447968 4684 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fdd1f6bfe10607cd3eeca4407c6e9152b10fc176d5808f95336150730c0ed39\": container with ID starting with 6fdd1f6bfe10607cd3eeca4407c6e9152b10fc176d5808f95336150730c0ed39 not found: ID does not exist" containerID="6fdd1f6bfe10607cd3eeca4407c6e9152b10fc176d5808f95336150730c0ed39" Oct 13 14:15:39 crc kubenswrapper[4684]: I1013 14:15:39.448025 4684 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fdd1f6bfe10607cd3eeca4407c6e9152b10fc176d5808f95336150730c0ed39"} err="failed to get container status \"6fdd1f6bfe10607cd3eeca4407c6e9152b10fc176d5808f95336150730c0ed39\": rpc error: code = NotFound desc = could not find container \"6fdd1f6bfe10607cd3eeca4407c6e9152b10fc176d5808f95336150730c0ed39\": container with ID starting with 6fdd1f6bfe10607cd3eeca4407c6e9152b10fc176d5808f95336150730c0ed39 not found: ID does not exist" Oct 13 14:15:40 crc kubenswrapper[4684]: I1013 14:15:40.362098 4684 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31d674c-55c3-49bc-a619-9754b0c0d7bb" path="/var/lib/kubelet/pods/a31d674c-55c3-49bc-a619-9754b0c0d7bb/volumes" Oct 13 14:15:51 crc kubenswrapper[4684]: I1013 14:15:51.500982 4684 scope.go:117] "RemoveContainer" containerID="5b3e05c4bd3e83acc3ebf12f73607f75b618c37974eb6c20c538d358f1ba0385" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515073204720024445 0ustar 